commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
a3dc06b0389eccd9a97270399c9878968c2d910c | shopify_auth/__init__.py | shopify_auth/__init__.py | VERSION = (0, 1, 0)
__version__ = '.'.join(map(str, VERSION))
__author__ = 'Gavin Ballard' | import shopify
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
VERSION = (0, 1, 1)
__version__ = '.'.join(map(str, VERSION))
__author__ = 'Gavin Ballard'
def initialize():
if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET:
raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings")
shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET) | Add initialize() method to ShopifyAuth, which sets up the API key and secret of the app. | Add initialize() method to ShopifyAuth, which sets up the API key and secret of the app. | Python | mit | funkybob/django-shopify-auth,discolabs/django-shopify-auth,funkybob/django-shopify-auth,discolabs/django-shopify-auth,RafaAguilar/django-shopify-auth,RafaAguilar/django-shopify-auth | ---
+++
@@ -1,3 +1,15 @@
-VERSION = (0, 1, 0)
+import shopify
+
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+
+
+VERSION = (0, 1, 1)
__version__ = '.'.join(map(str, VERSION))
__author__ = 'Gavin Ballard'
+
+
+def initialize():
+ if not settings.SHOPIFY_APP_API_KEY or not settings.SHOPIFY_APP_API_SECRET:
+ raise ImproperlyConfigured("SHOPIFY_APP_API_KEY and SHOPIFY_APP_API_SECRET must be set in settings")
+ shopify.Session.setup(api_key = settings.SHOPIFY_APP_API_KEY, secret = settings.SHOPIFY_APP_API_SECRET) |
f003fd0099b1817e965483e94a51745834a802de | simple_neural_network.py | simple_neural_network.py | # This code is inspired from this post:
# http://www.kdnuggets.com/2015/10/neural-network-python-tutorial.html?utm_content=buffer2cfea&utm_medium=social&utm_source=twitter.com&utm_campaign=buffer
import numpy as np
# Feature matrix and targets
X = np.array([[0,0,1],[0,1,1],[1,0,1],[1,1,1]])
print X.shape
y = np.array([[0,1,1,0]]).T
print y.shape
def sigmoid(x,y):
return 1/(1 + np.exp(-np.dot(x,y)))
# Random initialization
syn0 = 2*np.random.random((3,4)) - 1
syn1 = 2*np.random.random((4,1)) - 1
print syn0.shape, syn1.shape
# Number of back-propagation steps
N_STEPS = 60000
for step in xrange(N_STEPS):
l1 = sigmoid(X, syn0)
l2 = sigmoid(l1, syn1)
l2_delta = (y - l2)*(l2*(1-l2))
l1_delta = l2_delta.dot(syn1.T) * (l1 * (1-l1))
syn1 += l1.T.dot(l2_delta)
syn0 += X.T.dot(l1_delta)
| # This code is inspired from this post:
# http://www.kdnuggets.com/2015/10/neural-network-python-tutorial.html?utm_content=buffer2cfea&utm_medium=social&utm_source=twitter.com&utm_campaign=buffer
import numpy as np
np.random.seed(314)
# Feature matrix and targets
X = np.array([[0,0,1],[0,1,1],[1,0,1],[1,1,1]])
print X.shape
y = np.array([[0,1,1,0]]).T
print y.shape
def sigmoid(x,y):
return 1/(1 + np.exp(-np.dot(x,y)))
# Random initialization of first and second hidden layers
syn0 = 2*np.random.random((3,4)) - 1
syn1 = 2*np.random.random((4,1)) - 1
print syn0.shape, syn1.shape
# Number of back-propagation steps
N_STEPS = 60000
for step in xrange(N_STEPS):
l1 = sigmoid(X, syn0)
l2 = sigmoid(l1, syn1)
l2_delta = (y - l2)*(l2*(1-l2))
l1_delta = l2_delta.dot(syn1.T) * (l1 * (1-l1))
syn1 += l1.T.dot(l2_delta)
syn0 += X.T.dot(l1_delta)
| Set a seed for the simple neural network | Set a seed for the simple neural network
| Python | mit | yassineAlouini/ml-experiments,yassineAlouini/ml-experiments | ---
+++
@@ -2,6 +2,8 @@
# http://www.kdnuggets.com/2015/10/neural-network-python-tutorial.html?utm_content=buffer2cfea&utm_medium=social&utm_source=twitter.com&utm_campaign=buffer
import numpy as np
+
+np.random.seed(314)
# Feature matrix and targets
X = np.array([[0,0,1],[0,1,1],[1,0,1],[1,1,1]])
@@ -12,7 +14,7 @@
def sigmoid(x,y):
return 1/(1 + np.exp(-np.dot(x,y)))
-# Random initialization
+# Random initialization of first and second hidden layers
syn0 = 2*np.random.random((3,4)) - 1
syn1 = 2*np.random.random((4,1)) - 1
|
0688d285494e9c2ddb5b6ab35f2c0bd1dac02a54 | basecampx/client.py | basecampx/client.py | import json
import requests
class Client(object):
LAUNCHPAD_URL = 'https://launchpad.37signals.com'
BASE_URL = 'https://basecamp.com/%s/api/v1'
def __init__(self, access_token, user_agent, account_id=None):
"""Initialize client for making requests.
user_agent -- string identifying the app, and an url or email related
to the app; e.g. "BusyFlow (http://busyflow.com)".
"""
self.account_id = account_id
self.session = requests.session(
headers={'User-Agent': user_agent,
'Authorization': 'Bearer %s' % access_token,
'Content-Type': 'application/json; charset=utf-8'})
def accounts(self):
url = '%s/authorization.json' % self.LAUNCHPAD_URL
return json.loads(self.session.get(url).content)
| import json
import urlparse
import requests
class Client(object):
LAUNCHPAD_URL = 'https://launchpad.37signals.com/'
BASE_URL = 'https://basecamp.com/%s/api/v1/'
def __init__(self, access_token, user_agent, account_id=None):
"""Initialize client for making requests.
user_agent -- string identifying the app, and an url or email related
to the app; e.g. "BusyFlow (http://busyflow.com)".
"""
self.account_id = account_id
self.session = requests.session(
headers={'User-Agent': user_agent,
'Authorization': 'Bearer %s' % access_token,
'Content-Type': 'application/json; charset=utf-8'})
def accounts(self):
url = urlparse.urljoin(self.LAUNCHPAD_URL,'authorization.json')
return json.loads(self.session.get(url).content)
| Use urljoin to form urls. | Use urljoin to form urls.
| Python | mit | nous-consulting/basecamp-next | ---
+++
@@ -1,10 +1,11 @@
import json
+import urlparse
import requests
class Client(object):
- LAUNCHPAD_URL = 'https://launchpad.37signals.com'
- BASE_URL = 'https://basecamp.com/%s/api/v1'
+ LAUNCHPAD_URL = 'https://launchpad.37signals.com/'
+ BASE_URL = 'https://basecamp.com/%s/api/v1/'
def __init__(self, access_token, user_agent, account_id=None):
"""Initialize client for making requests.
@@ -19,5 +20,5 @@
'Content-Type': 'application/json; charset=utf-8'})
def accounts(self):
- url = '%s/authorization.json' % self.LAUNCHPAD_URL
+ url = urlparse.urljoin(self.LAUNCHPAD_URL,'authorization.json')
return json.loads(self.session.get(url).content) |
3abb2aa6a86603ab8811c47ffd61a851dc314276 | src/run.py | src/run.py | """This is the main function of twitter-news-bot project
It is intended to be run as a cronjob to periodically scan
for news of interest and Tweet about it
"""
import random
from twitter_bot.service.curator import Curator
from twitter_bot.service.news_reader import NewsReader
from twitter_bot.service.twitter import TwitterService
def main():
news_reader = NewsReader()
headlines = news_reader.get_headlines()
curator = Curator()
interesting_headlines = curator.keep_interesting_items(headlines)
if interesting_headlines:
tweet = random.choice(interesting_headlines)
print tweet
twitter_api = TwitterService()
twitter_api.post_tweet(tweet)
if __name__ == '__main__':
main()
| """This is the main function of twitter-news-bot project
It is intended to be run as a cronjob to periodically scan
for news of interest and Tweet about it
"""
import argparse
import random
from twitter_bot.service.curator import Curator
from twitter_bot.service.news_reader import NewsReader
from twitter_bot.service.twitter import TwitterService
def _get_command_args():
parser = argparse.ArgumentParser(description='''
Command for Twitter Bot to scan news sources, find an interesting
piece of news and tweet about it
''')
parser.add_argument('--debug', action='store_true')
return parser.parse_args()
def main():
command_args = _get_command_args()
news_reader = NewsReader()
headlines = news_reader.get_headlines()
curator = Curator()
interesting_headlines = curator.keep_interesting_items(headlines)
if interesting_headlines:
tweet = random.choice(interesting_headlines)
if command_args.debug:
print tweet
else:
twitter_api = TwitterService()
twitter_api.post_tweet(tweet)
else:
print 'No interesting news found'
if __name__ == '__main__':
main()
| Add argument parser to allow for debug mode | Add argument parser to allow for debug mode
| Python | mit | econne01/twitter-news-bot | ---
+++
@@ -3,13 +3,25 @@
It is intended to be run as a cronjob to periodically scan
for news of interest and Tweet about it
"""
+import argparse
import random
+
from twitter_bot.service.curator import Curator
from twitter_bot.service.news_reader import NewsReader
from twitter_bot.service.twitter import TwitterService
+def _get_command_args():
+ parser = argparse.ArgumentParser(description='''
+ Command for Twitter Bot to scan news sources, find an interesting
+ piece of news and tweet about it
+ ''')
+ parser.add_argument('--debug', action='store_true')
+ return parser.parse_args()
+
def main():
+ command_args = _get_command_args()
+
news_reader = NewsReader()
headlines = news_reader.get_headlines()
@@ -18,9 +30,13 @@
if interesting_headlines:
tweet = random.choice(interesting_headlines)
- print tweet
- twitter_api = TwitterService()
- twitter_api.post_tweet(tweet)
+ if command_args.debug:
+ print tweet
+ else:
+ twitter_api = TwitterService()
+ twitter_api.post_tweet(tweet)
+ else:
+ print 'No interesting news found'
if __name__ == '__main__': |
241df143d4f75404c6cda3ff0ab3fe2fccba5f79 | whistleblower/tasks.py | whistleblower/tasks.py | import json
import logging
import os
import subprocess
from celery import Celery
from celery.schedules import crontab
from whistleblower.targets.twitter import Post as TwitterPost
import whistleblower.queue
HOUR = 3600
ENABLED_TARGETS = [
TwitterPost,
]
RABBITMQ_URL = os.environ.get('CLOUDAMQP_URL', 'pyamqp://guest@localhost//')
app = Celery('tasks', broker=RABBITMQ_URL)
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(4 * HOUR, process_queue.s())
@app.task
def update_queue():
whistleblower.queue.Queue().update()
@app.task
def process_queue():
whistleblower.queue.Queue().process()
@app.task
def publish_reimbursement(reimbursement):
for target in ENABLED_TARGETS:
target(reimbursement).publish()
| import json
import logging
import os
import subprocess
from celery import Celery
from celery.schedules import crontab
from whistleblower.targets.twitter import Post as TwitterPost
import whistleblower.queue
HOUR = 3600
ENABLED_TARGETS = [
TwitterPost,
]
RABBITMQ_URL = os.environ.get('CLOUDAMQP_URL', 'pyamqp://guest@localhost//')
app = Celery('tasks', broker=RABBITMQ_URL)
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(3 * HOUR, process_queue.s())
@app.task
def update_queue():
whistleblower.queue.Queue().update()
@app.task
def process_queue():
whistleblower.queue.Queue().process()
@app.task
def publish_reimbursement(reimbursement):
for target in ENABLED_TARGETS:
target(reimbursement).publish()
| Reduce time window between posts to 3 hours | Reduce time window between posts to 3 hours
| Python | unlicense | datasciencebr/whistleblower | ---
+++
@@ -19,7 +19,7 @@
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
- sender.add_periodic_task(4 * HOUR, process_queue.s())
+ sender.add_periodic_task(3 * HOUR, process_queue.s())
@app.task |
2cc9de18bf20753907c2c0e591b58ccefe1578e0 | erudite/components/commands/find_owner.py | erudite/components/commands/find_owner.py | """
Command that will allow for a user to inject triples into a database.
"""
from rhobot.components.commands.base_command import BaseCommand
from rdflib.namespace import FOAF
from rhobot.namespace import RHO
import logging
logger = logging.getLogger(__name__)
class FindOwner(BaseCommand):
def initialize_command(self):
super(FindOwner, self).initialize_command()
logger.info('Initialize Command')
self._initialize_command(identifier='find_owner', name='Find Owner',
additional_dependencies={'rho_bot_storage_client', 'rho_bot_scheduler', })
def command_start(self, request, initial_session):
"""
Provide the configuration details back to the requester and end the command.
:param request:
:param initial_session:
:return:
"""
storage = self.xmpp['rho_bot_storage_client'].create_payload()
storage.add_type(FOAF.Person, RHO.Owner)
results = self.xmpp['rho_bot_storage_client'].find_nodes(storage)
initial_session['payload'] = results.populate_payload()
initial_session['next'] = None
initial_session['has_next'] = False
promise = self.xmpp['rho_bot_scheduler'].promise()
promise.resolved(initial_session)
return promise
find_owner = FindOwner
| """
Command that will allow for a user to inject triples into a database.
"""
from rhobot.components.commands.base_command import BaseCommand
from rdflib.namespace import FOAF
from rhobot.namespace import RHO
from rhobot.components.storage import StoragePayload
import logging
logger = logging.getLogger(__name__)
class FindOwner(BaseCommand):
def initialize_command(self):
super(FindOwner, self).initialize_command()
logger.info('Initialize Command')
self._initialize_command(identifier='find_owner', name='Find Owner',
additional_dependencies={'rho_bot_storage_client', 'rho_bot_scheduler', })
def command_start(self, request, initial_session):
"""
Provide the configuration details back to the requester and end the command.
:param request:
:param initial_session:
:return:
"""
storage = StoragePayload()
storage.add_type(FOAF.Person, RHO.Owner)
promise = self.xmpp['rho_bot_storage_client'].find_nodes(storage)
def find_nodes_processor(results):
"""
Process the results and place the payload into the initial session value.
:param results:
:return: the initial session value.
"""
initial_session['payload'] = results.populate_payload()
return initial_session
# Finish populating the rest of initial_session values.
initial_session['next'] = None
initial_session['has_next'] = False
return promise.then(find_nodes_processor)
find_owner = FindOwner
| Update find owner to work with promises. | Update find owner to work with promises.
| Python | bsd-3-clause | rerobins/rho_erudite | ---
+++
@@ -4,6 +4,7 @@
from rhobot.components.commands.base_command import BaseCommand
from rdflib.namespace import FOAF
from rhobot.namespace import RHO
+from rhobot.components.storage import StoragePayload
import logging
logger = logging.getLogger(__name__)
@@ -25,19 +26,26 @@
:return:
"""
- storage = self.xmpp['rho_bot_storage_client'].create_payload()
+ storage = StoragePayload()
storage.add_type(FOAF.Person, RHO.Owner)
- results = self.xmpp['rho_bot_storage_client'].find_nodes(storage)
+ promise = self.xmpp['rho_bot_storage_client'].find_nodes(storage)
- initial_session['payload'] = results.populate_payload()
+ def find_nodes_processor(results):
+ """
+ Process the results and place the payload into the initial session value.
+ :param results:
+ :return: the initial session value.
+ """
+ initial_session['payload'] = results.populate_payload()
+
+ return initial_session
+
+ # Finish populating the rest of initial_session values.
initial_session['next'] = None
initial_session['has_next'] = False
- promise = self.xmpp['rho_bot_scheduler'].promise()
+ return promise.then(find_nodes_processor)
- promise.resolved(initial_session)
-
- return promise
find_owner = FindOwner |
144f14bc292e9621508ac755c70d679affddfb90 | corehq/apps/couch_sql_migration/management/commands/show_started_migrations.py | corehq/apps/couch_sql_migration/management/commands/show_started_migrations.py | from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from operator import attrgetter
import six
from django.core.management.base import BaseCommand
from corehq.apps.domain_migration_flags.api import get_uncompleted_migrations
from ...progress import COUCH_TO_SQL_SLUG
class Command(BaseCommand):
"""Show domains for which the migration has been strated and not completed"""
def handle(self, **options):
migrations = get_uncompleted_migrations(COUCH_TO_SQL_SLUG)
for status, items in sorted(six.iteritems(migrations)):
print(status)
for item in sorted(items, key=attrgetter("domain")):
started = item.started_on
print(" {}{}".format(
item.domain,
started.strftime(" (%Y-%m-%d)") if started else "",
))
| from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from operator import attrgetter
from django.core.management.base import BaseCommand
import six
from corehq.apps.domain_migration_flags.api import get_uncompleted_migrations
from ...progress import COUCH_TO_SQL_SLUG
from .migrate_multiple_domains_from_couch_to_sql import (
format_diff_stats,
get_diff_stats,
)
class Command(BaseCommand):
"""Show domains for which the migration has been strated and not completed"""
def handle(self, **options):
migrations = get_uncompleted_migrations(COUCH_TO_SQL_SLUG)
for status, items in sorted(six.iteritems(migrations)):
print(status)
print("=" * len(status))
print("")
for item in sorted(items, key=attrgetter("domain")):
started = item.started_on
print("{}{}".format(
item.domain,
started.strftime(" (%Y-%m-%d)") if started else "",
))
stats = get_diff_stats(item.domain)
print(format_diff_stats(stats))
print("")
| Print diff stats for each domain | Print diff stats for each domain
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | ---
+++
@@ -4,12 +4,17 @@
from operator import attrgetter
+from django.core.management.base import BaseCommand
+
import six
-from django.core.management.base import BaseCommand
from corehq.apps.domain_migration_flags.api import get_uncompleted_migrations
from ...progress import COUCH_TO_SQL_SLUG
+from .migrate_multiple_domains_from_couch_to_sql import (
+ format_diff_stats,
+ get_diff_stats,
+)
class Command(BaseCommand):
@@ -19,9 +24,14 @@
migrations = get_uncompleted_migrations(COUCH_TO_SQL_SLUG)
for status, items in sorted(six.iteritems(migrations)):
print(status)
+ print("=" * len(status))
+ print("")
for item in sorted(items, key=attrgetter("domain")):
started = item.started_on
- print(" {}{}".format(
+ print("{}{}".format(
item.domain,
started.strftime(" (%Y-%m-%d)") if started else "",
))
+ stats = get_diff_stats(item.domain)
+ print(format_diff_stats(stats))
+ print("") |
61fa32fc65ea4dbc48f881efd70c82955d8bb15e | coda/coda_project/settings/test.py | coda/coda_project/settings/test.py | from .base import *
SITE_ID = 1
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'coda_local',
'USER': os.getenv('DB_MYSQL_USER', default="root"),
'PASSWORD': os.getenv('DB_PASSWORD', default="root"),
'HOST': os.getenv('DB_HOST', default='db'),
}
}
| from .base import *
SITE_ID = 1
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'coda_local',
'USER': os.getenv('DB_MYSQL_USER', default='root'),
'PASSWORD': os.getenv('DB_PASSWORD', default='root'),
'HOST': os.getenv('DB_HOST', default='db'),
}
}
| Change double quotes to single. | Change double quotes to single.
| Python | bsd-3-clause | unt-libraries/coda,unt-libraries/coda,unt-libraries/coda,unt-libraries/coda | ---
+++
@@ -6,8 +6,8 @@
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'coda_local',
- 'USER': os.getenv('DB_MYSQL_USER', default="root"),
- 'PASSWORD': os.getenv('DB_PASSWORD', default="root"),
+ 'USER': os.getenv('DB_MYSQL_USER', default='root'),
+ 'PASSWORD': os.getenv('DB_PASSWORD', default='root'),
'HOST': os.getenv('DB_HOST', default='db'),
}
} |
0e8f8d7606380809c206e3e5db329040abe6f267 | knightos.py | knightos.py | import os
import requests
from sys import stderr, exit
from resources import get_resource_root
def get_key(platform):
if platform == "TI73": return 0x02
if platform == "TI83p" or platform == "TI83pSE": return 0x04
if platform == "TI84p" or platform == "TI84pSE": return 0x0A
if platform == "TI84pCSE": return 0x0F
def get_upgrade_ext(platform):
if platform == "TI73": return '73u'
if platform == "TI84pCSE": return '8cu'
return '8xu'
def get_privileged(platform):
if platform == "TI73": return 0x1C
if platform == "TI83p": return 0x1C
if platform == "TI83pSE": return 0x7C
if platform == "TI84p": return 0x3C
if platform == "TI84pSE": return 0x7C
if platform == "TI84pCSE": return 0xFC
def get_fat(platform):
if platform == "TI73": return 0x17
if platform == "TI83p": return 0x37
if platform == "TI83pSE": return 0x77
if platform == "TI84p": return 0x37
if platform == "TI84pSE": return 0x77
if platform == "TI84pCSE": return 0xF7
| import os
import requests
from sys import stderr, exit
from resources import get_resource_root
def get_key(platform):
if platform == "TI73": return 0x02
if platform == "TI83p" or platform == "TI83pSE": return 0x04
if platform == "TI84p" or platform == "TI84pSE": return 0x0A
if platform == "TI84pCSE": return 0x0F
def get_upgrade_ext(platform):
if platform == "TI73": return '73u'
if platform == "TI84pCSE": return '8cu'
return '8xu'
def get_privileged(platform):
if platform == "TI73": return 0x1C
if platform == "TI83p": return 0x1C
if platform == "TI83pSE": return 0x7C
if platform == "TI84p": return 0x3C
if platform == "TI84pSE": return 0x7C
if platform == "TI84pCSE": return 0xFC
def get_fat(platform):
if platform == "TI73": return 0x17
if platform == "TI83p": return 0x17
if platform == "TI83pSE": return 0x77
if platform == "TI84p": return 0x37
if platform == "TI84pSE": return 0x77
if platform == "TI84pCSE": return 0xF7
| Fix FAT constant for TI-83+ | Fix FAT constant for TI-83+
| Python | mit | KnightOS/sdk,KnightOS/sdk,KnightOS/sdk | ---
+++
@@ -24,7 +24,7 @@
def get_fat(platform):
if platform == "TI73": return 0x17
- if platform == "TI83p": return 0x37
+ if platform == "TI83p": return 0x17
if platform == "TI83pSE": return 0x77
if platform == "TI84p": return 0x37
if platform == "TI84pSE": return 0x77 |
5950f14ab025999b8161204595a7c35554fe46a0 | celery/decorators.py | celery/decorators.py | from celery.task.base import Task
from celery.registry import tasks
from inspect import getargspec
def task(**options):
"""Make a task out of any callable.
Examples:
>>> @task()
... def refresh_feed(url):
... return Feed.objects.get(url=url).refresh()
>>> refresh_feed("http://example.com/rss") # Regular
<Feed: http://example.com/rss>
>>> refresh_feed.delay("http://example.com/rss") # Async
<AsyncResult: 8998d0f4-da0b-4669-ba03-d5ab5ac6ad5d>
# With setting extra options and using retry.
>>> @task(exchange="feeds")
... def refresh_feed(url, **kwargs):
... try:
... return Feed.objects.get(url=url).refresh()
... except socket.error, exc:
... refresh_feed.retry(args=[url], kwargs=kwargs,
... exc=exc)
"""
def _create_task_cls(fun):
name = options.pop("name", None)
cls_name = fun.__name__
def run(self, *args, **kwargs):
return fun(*args, **kwargs)
run.__name__ = fun.__name__
run.argspec = getargspec(fun)
cls_dict = dict(options)
cls_dict["run"] = run
cls_dict["__module__"] = fun.__module__
task = type(cls_name, (Task, ), cls_dict)()
return task
return _create_task_cls
| from celery.task.base import Task
from inspect import getargspec
def task(**options):
"""Make a task out of any callable.
Examples:
>>> @task()
... def refresh_feed(url):
... return Feed.objects.get(url=url).refresh()
>>> refresh_feed("http://example.com/rss") # Regular
<Feed: http://example.com/rss>
>>> refresh_feed.delay("http://example.com/rss") # Async
<AsyncResult: 8998d0f4-da0b-4669-ba03-d5ab5ac6ad5d>
# With setting extra options and using retry.
>>> @task(exchange="feeds")
... def refresh_feed(url, **kwargs):
... try:
... return Feed.objects.get(url=url).refresh()
... except socket.error, exc:
... refresh_feed.retry(args=[url], kwargs=kwargs,
... exc=exc)
"""
def _create_task_cls(fun):
base = options.pop("base", Task)
cls_name = fun.__name__
def run(self, *args, **kwargs):
return fun(*args, **kwargs)
run.__name__ = fun.__name__
run.argspec = getargspec(fun)
cls_dict = dict(options)
cls_dict["run"] = run
cls_dict["__module__"] = fun.__module__
task = type(cls_name, (base, ), cls_dict)()
return task
return _create_task_cls
| Allow base=PeriodicTask argument to task decorator | Allow base=PeriodicTask argument to task decorator
| Python | bsd-3-clause | WoLpH/celery,cbrepo/celery,ask/celery,cbrepo/celery,frac/celery,mitsuhiko/celery,WoLpH/celery,ask/celery,frac/celery,mitsuhiko/celery | ---
+++
@@ -1,5 +1,4 @@
from celery.task.base import Task
-from celery.registry import tasks
from inspect import getargspec
@@ -32,7 +31,7 @@
"""
def _create_task_cls(fun):
- name = options.pop("name", None)
+ base = options.pop("base", Task)
cls_name = fun.__name__
@@ -45,7 +44,7 @@
cls_dict["run"] = run
cls_dict["__module__"] = fun.__module__
- task = type(cls_name, (Task, ), cls_dict)()
+ task = type(cls_name, (base, ), cls_dict)()
return task
|
e88899fe11f1216e25e6f42f4af2acf003b22071 | documentation/doxygen/makeimage.py | documentation/doxygen/makeimage.py | #! /usr/bin/env python
import ROOT
import shutil
import os
def makeimage(MacroName, ImageName, OutDir, cp, py, batch):
'''Generates the ImageName output of the macro MacroName'''
if batch:
ROOT.gROOT.SetBatch(1)
if py: execfile(MacroName)
else: ROOT.gInterpreter.ProcessLine(".x " + MacroName)
if cp:
MN = MacroName.split("(")[0]
MNBase = os.path.basename(MN)
shutil.copyfile("%s" %MN,"%s/macros/%s" %(OutDir,MNBase))
canvases = ROOT.gROOT.GetListOfCanvases()
for ImageNum,can in enumerate(canvases):
ImageNum += 1
can.SaveAs("%s/html/pict%d_%s" %(OutDir,ImageNum,ImageName))
f = open ("NumberOfImages.dat","w")
f.write("%d\n" %ImageNum)
f.close()
if __name__ == "__main__":
from sys import argv
makeimage(argv[1], argv[2], argv[3], bool(argv[4]), bool(argv[5]), bool(argv[6])) | #! /usr/bin/env python
import ROOT
import shutil
import os
def makeimage(MacroName, ImageName, OutDir, cp, py, batch):
'''Generates the ImageName output of the macro MacroName'''
ROOT.gStyle.SetImageScaling(3.)
if batch:
ROOT.gROOT.SetBatch(1)
if py: execfile(MacroName)
else: ROOT.gInterpreter.ProcessLine(".x " + MacroName)
if cp:
MN = MacroName.split("(")[0]
MNBase = os.path.basename(MN)
shutil.copyfile("%s" %MN,"%s/macros/%s" %(OutDir,MNBase))
s = open ("ImagesSizes.dat","w")
canvases = ROOT.gROOT.GetListOfCanvases()
for ImageNum,can in enumerate(canvases):
ImageNum += 1
can.SaveAs("%s/html/pict%d_%s" %(OutDir,ImageNum,ImageName))
cw = can.GetWindowWidth()
s.write("%d\n" %cw)
s.close()
f = open ("NumberOfImages.dat","w")
f.write("%d\n" %ImageNum)
f.close()
if __name__ == "__main__":
from sys import argv
makeimage(argv[1], argv[2], argv[3], bool(argv[4]), bool(argv[5]), bool(argv[6])) | Implement high def pictures for python tutorials. | Implement high def pictures for python tutorials.
| Python | lgpl-2.1 | olifre/root,olifre/root,olifre/root,root-mirror/root,karies/root,root-mirror/root,olifre/root,olifre/root,olifre/root,olifre/root,karies/root,root-mirror/root,karies/root,root-mirror/root,karies/root,karies/root,karies/root,olifre/root,root-mirror/root,karies/root,olifre/root,olifre/root,karies/root,root-mirror/root,olifre/root,root-mirror/root,karies/root,karies/root,root-mirror/root,karies/root,root-mirror/root,root-mirror/root,root-mirror/root | ---
+++
@@ -6,6 +6,8 @@
def makeimage(MacroName, ImageName, OutDir, cp, py, batch):
'''Generates the ImageName output of the macro MacroName'''
+
+ ROOT.gStyle.SetImageScaling(3.)
if batch:
ROOT.gROOT.SetBatch(1)
@@ -18,13 +20,20 @@
MNBase = os.path.basename(MN)
shutil.copyfile("%s" %MN,"%s/macros/%s" %(OutDir,MNBase))
+ s = open ("ImagesSizes.dat","w")
+
canvases = ROOT.gROOT.GetListOfCanvases()
for ImageNum,can in enumerate(canvases):
ImageNum += 1
can.SaveAs("%s/html/pict%d_%s" %(OutDir,ImageNum,ImageName))
- f = open ("NumberOfImages.dat","w")
- f.write("%d\n" %ImageNum)
- f.close()
+ cw = can.GetWindowWidth()
+ s.write("%d\n" %cw)
+
+ s.close()
+
+ f = open ("NumberOfImages.dat","w")
+ f.write("%d\n" %ImageNum)
+ f.close()
if __name__ == "__main__":
from sys import argv |
5b7db97d615b9e30a2780a45bd1c38690e6e2e51 | src/practica/practica_turtlebot_mariano/src/node.py | src/practica/practica_turtlebot_mariano/src/node.py | #!/usr/bin/env python
# TODO is it necessary here?
import roslib; roslib.load_manifest('practica_turtlebot')
import rospy
from driver import Driver
from driver import Point
if __name__ == '__main__':
try:
# Starts a unique node with name driver
rospy.init_node('driver')
# Get current position
start_position = Point(0,0,0)
end_position = Point(0,0,0)
# Create driver
driver = Driver(start_position, end_position)
# Tell him what to do
driver.go_forward()
# Hand control over to ROS
# This function will only exit when the user press Ctrl + C
# Does not do anything. Only handles here the program
rospy.spin()
except rospy.ROSInterruptException:
pass
| #!/usr/bin/env python
# TODO is it necessary here?
import roslib; roslib.load_manifest('practica_turtlebot')
import rospy
from driver import Driver
from driver import Point
if __name__ == '__main__':
try:
# Starts a unique node with name driver
rospy.init_node('driver')
# Get current position
start_position = Point(0,0,0)
end_position = Point(0,0,0)
# Create driver
driver = Driver(start_position, end_position)
# Tell him what to do
# driver.bug_0()
driver.stop_on_obstacle()
# Hand control over to ROS
# This function will only exit when the user press Ctrl + C
# Does not do anything. Only handles here the program
except rospy.ROSInterruptException:
pass
| Add impl. of stop on obstacle | Add impl. of stop on obstacle | Python | mit | Sharekhan/catkin_ws | ---
+++
@@ -8,26 +8,23 @@
from driver import Point
if __name__ == '__main__':
- try:
- # Starts a unique node with name driver
- rospy.init_node('driver')
+ try:
+ # Starts a unique node with name driver
+ rospy.init_node('driver')
- # Get current position
- start_position = Point(0,0,0)
- end_position = Point(0,0,0)
+ # Get current position
+ start_position = Point(0,0,0)
+ end_position = Point(0,0,0)
- # Create driver
- driver = Driver(start_position, end_position)
+ # Create driver
+ driver = Driver(start_position, end_position)
- # Tell him what to do
- driver.go_forward()
+ # Tell him what to do
+ # driver.bug_0()
+ driver.stop_on_obstacle()
-
- # Hand control over to ROS
- # This function will only exit when the user press Ctrl + C
- # Does not do anything. Only handles here the program
- rospy.spin()
-
-
- except rospy.ROSInterruptException:
- pass
+ # Hand control over to ROS
+ # This function will only exit when the user press Ctrl + C
+ # Does not do anything. Only handles here the program
+ except rospy.ROSInterruptException:
+ pass |
1cc15cbe37e1118f102f05d9530d6f0a6055d638 | handler/base_handler.py | handler/base_handler.py | import os
from serf_master import SerfHandler
from utils import with_payload, truncated_stdout
class BaseHandler(SerfHandler):
@truncated_stdout
@with_payload
def where(self, role=None):
my_role = os.environ.get('ROLE', 'no_role')
if my_role == role:
print(self.my_info())
def my_info(self):
return {
'ip': os.environ.get('ADVERTISE', None)
} | import os
from serf_master import SerfHandler
from utils import with_payload, truncated_stdout
class BaseHandler(SerfHandler):
def __init__(self, *args, **kwargs):
super(BaseHandler, self).__init__(*args, **kwargs)
self.setup()
def setup(self):
pass
@truncated_stdout
@with_payload
def where(self, role=None):
my_role = os.environ.get('ROLE', 'no_role')
if my_role == role:
print(self.my_info())
def my_info(self):
return {
'ip': os.environ.get('ADVERTISE', None)
} | Add setup method to base serf handler | Add setup method to base serf handler
| Python | mit | waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode | ---
+++
@@ -5,6 +5,13 @@
class BaseHandler(SerfHandler):
+
+ def __init__(self, *args, **kwargs):
+ super(BaseHandler, self).__init__(*args, **kwargs)
+ self.setup()
+
+ def setup(self):
+ pass
@truncated_stdout
@with_payload |
551d4dc7cb8839fdf8269284c81079b2b29f2ba5 | version.py | version.py | major = 0
minor=0
patch=20
branch="master"
timestamp=1376526219.94 | major = 0
minor=0
patch=21
branch="master"
timestamp=1376526439.16 | Tag commit for v0.0.21-master generated by gitmake.py | Tag commit for v0.0.21-master generated by gitmake.py
| Python | mit | ryansturmer/gitmake | ---
+++
@@ -1,5 +1,5 @@
major = 0
minor=0
-patch=20
+patch=21
branch="master"
-timestamp=1376526219.94
+timestamp=1376526439.16 |
fa73ac1d9451cbef8be65cfcd2f03762831f4212 | website_snippet_data_slider/__openerp__.py | website_snippet_data_slider/__openerp__.py | # -*- coding: utf-8 -*-
# © 2016-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Website Snippet - Data Slider",
"summary": "Abstract data slider for use on website. Primary use is product slider.",
"version": "9.0.1.0.0",
"category": "Website",
"website": "https://laslabs.com/",
"author": "LasLabs",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"website",
],
"data": [
"views/assets.xml",
'views/snippet_template.xml',
],
}
| # -*- coding: utf-8 -*-
# © 2016-TODAY LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Website Snippet - Data Slider",
"summary":
"Abstract data slider for use on website."
" Primary use (and default implementation) is product slider.",
"version": "9.0.1.0.0",
"category": "Website",
"website": "https://laslabs.com/",
"author": "LasLabs",
"license": "AGPL-3",
"application": False,
"installable": True,
"depends": [
"website",
],
"data": [
"views/assets.xml",
'views/snippet_template.xml',
],
}
| Update summary to fix flake in website_snippet_data_slider | Update summary to fix flake in website_snippet_data_slider
| Python | agpl-3.0 | laslabs/odoo-website,laslabs/odoo-website,laslabs/odoo-website | ---
+++
@@ -4,7 +4,9 @@
{
"name": "Website Snippet - Data Slider",
- "summary": "Abstract data slider for use on website. Primary use is product slider.",
+ "summary":
+ "Abstract data slider for use on website."
+ " Primary use (and default implementation) is product slider.",
"version": "9.0.1.0.0",
"category": "Website",
"website": "https://laslabs.com/", |
813b81293cd2bd69982aef36ad09fc52f7bea1f6 | relaygram/http_server.py | relaygram/http_server.py | import http.server
from threading import Thread
import os.path
class HTTPHandler:
def __init__(self, config):
self.config = config
handler = HTTPHandler.make_http_handler('C:/tmp/test/')
self.httpd = http.server.HTTPServer(('', 8000), handler)
self.thread = Thread(target=self.main_loop)
def run(self):
self.thread.start()
return self
def main_loop(self):
self.httpd.serve_forever()
@staticmethod
def make_http_handler(root_path):
class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super(RelayGramHTTPHandler, self).__init__(*args, **kwargs)
def do_GET(self):
file_path = os.path.abspath(root_path + self.path)
if os.path.commonpath([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt
self.send_error(501, "Nice try")
else:
if not os.path.exists(file_path) or not os.path.isfile(file_path):
self.send_error(404, 'File Not Found')
else:
self.send_response(200)
self.wfile.write(open(file_path, mode='rb').read())
return RelayGramHTTPHandler
| import http.server
from threading import Thread
import os.path
class HTTPHandler:
def __init__(self, config):
self.config = config
handler = HTTPHandler.make_http_handler(self.config['media_dir'])
self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler)
self.thread = Thread(target=self.main_loop)
def run(self):
self.thread.start()
return self
def main_loop(self):
self.httpd.serve_forever()
@staticmethod
def make_http_handler(root_path):
class RelayGramHTTPHandler(http.server.BaseHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super(RelayGramHTTPHandler, self).__init__(*args, **kwargs)
def do_GET(self):
file_path = os.path.abspath(root_path + self.path)
if os.path.commonpath([root_path, file_path]) != os.path.abspath(root_path): # Detect path traversal attempt
self.send_error(501, "Nice try")
else:
if not os.path.exists(file_path) or not os.path.isfile(file_path):
self.send_error(404, 'File Not Found')
else:
self.send_response(200)
self.wfile.write(open(file_path, mode='rb').read())
return RelayGramHTTPHandler
| Use proper settings for httpd server. | Use proper settings for httpd server.
| Python | mit | Surye/relaygram | ---
+++
@@ -7,8 +7,8 @@
def __init__(self, config):
self.config = config
- handler = HTTPHandler.make_http_handler('C:/tmp/test/')
- self.httpd = http.server.HTTPServer(('', 8000), handler)
+ handler = HTTPHandler.make_http_handler(self.config['media_dir'])
+ self.httpd = http.server.HTTPServer(('', self.config['media']['port']), handler)
self.thread = Thread(target=self.main_loop)
|
103f232f6b4c12e1d1c643c48c5055d66d7a126d | workshopvenues/venues/tests/test_models.py | workshopvenues/venues/tests/test_models.py | """
This file demonstrates writing tests using the unittest module. These will pass
when you run "./manage.py test --settings=workshopvenues.settings_test venues"
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from .factories import FacilityFactory, CountryFactory, CityFactory, VenueFactory, ImageFactory
class ModelsTest(TestCase):
def test_create_facility(self):
fac_wifi = FacilityFactory.create(name = 'WiFi')
self.assertTrue(fac_wifi.id >= 0)
fac_elevator = FacilityFactory.create(name = 'Elevator')
self.assertTrue(fac_elevator.id >= 0)
def test_create_country(self):
country = CountryFactory.create(name = 'United Kingdom')
self.assertTrue(country.id >= 0)
def test_create_city(self):
# Create the City
city = CityFactory.create(name = 'London')
self.assertTrue(city.id >= 0)
def test_create_venue(self):
venue = VenueFactory.create(facilities = (FacilityFactory(name = 'WiFI'),
FacilityFactory.create(name = 'Elevator')))
self.assertTrue(venue.id >= 0)
def test_create_image(self):
image = ImageFactory.create()
self.assertTrue(image.id >= 0)
| """
This file demonstrates writing tests using the unittest module. These will pass
when you run "./manage.py test --settings=workshopvenues.settings_test venues"
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from .factories import FacilityFactory, CountryFactory, CityFactory, VenueFactory, ImageFactory
class ModelsTest(TestCase):
def test_create_facility(self):
fac_wifi = FacilityFactory.create(name = 'WiFi')
self.assertTrue(fac_wifi.id >= 0)
fac_elevator = FacilityFactory.create(name = 'Elevator')
self.assertTrue(fac_elevator.id >= 0)
def test_create_country(self):
country = CountryFactory.create(name = 'United Kingdom')
self.assertTrue(country.id >= 0)
def test_create_city(self):
# Create the City
city = CityFactory.create(name = 'London')
self.assertTrue(city.id >= 0)
def test_create_venue(self):
venue = VenueFactory.create(facilities = (FacilityFactory(name = 'WiFI'),
FacilityFactory.create(name = 'Elevator')))
self.assertTrue(venue.id >= 0)
def test_build_venue(self):
venue = VenueFactory.build(facilities = (FacilityFactory(name = 'WiFI'),
FacilityFactory.build(name = 'Elevator')))
self.assertEqual(venue.id, None, "Venue id is None when is not saved.")
def test_create_image(self):
image = ImageFactory.create()
self.assertTrue(image.id >= 0)
| Add an additional test for Venue model to cover all the factory cases. | Add an additional test for Venue model to cover all the factory cases.
| Python | bsd-3-clause | andreagrandi/workshopvenues | ---
+++
@@ -29,6 +29,11 @@
venue = VenueFactory.create(facilities = (FacilityFactory(name = 'WiFI'),
FacilityFactory.create(name = 'Elevator')))
self.assertTrue(venue.id >= 0)
+
+ def test_build_venue(self):
+ venue = VenueFactory.build(facilities = (FacilityFactory(name = 'WiFI'),
+ FacilityFactory.build(name = 'Elevator')))
+ self.assertEqual(venue.id, None, "Venue id is None when is not saved.")
def test_create_image(self):
image = ImageFactory.create() |
b977de3af3ae93a57f36e1d6eea234f01cbc7a61 | py/selenium/__init__.py | py/selenium/__init__.py | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium import selenium
__version__ = "2.53.0"
| # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
__version__ = "2.53.0"
| Remove import of Selenium RC | Remove import of Selenium RC
| Python | apache-2.0 | bayandin/selenium,carlosroh/selenium,asolntsev/selenium,Herst/selenium,alb-i986/selenium,joshuaduffy/selenium,oddui/selenium,TikhomirovSergey/selenium,sankha93/selenium,mojwang/selenium,lmtierney/selenium,carlosroh/selenium,jsakamoto/selenium,mach6/selenium,Herst/selenium,krmahadevan/selenium,DrMarcII/selenium,tbeadle/selenium,valfirst/selenium,joshuaduffy/selenium,Ardesco/selenium,Tom-Trumper/selenium,mojwang/selenium,jsakamoto/selenium,markodolancic/selenium,titusfortner/selenium,5hawnknight/selenium,davehunt/selenium,davehunt/selenium,carlosroh/selenium,GorK-ChO/selenium,asolntsev/selenium,krmahadevan/selenium,carlosroh/selenium,oddui/selenium,titusfortner/selenium,mojwang/selenium,jabbrwcky/selenium,kalyanjvn1/selenium,sag-enorman/selenium,markodolancic/selenium,5hawnknight/selenium,twalpole/selenium,Dude-X/selenium,davehunt/selenium,dibagga/selenium,GorK-ChO/selenium,kalyanjvn1/selenium,DrMarcII/selenium,asolntsev/selenium,GorK-ChO/selenium,HtmlUnit/selenium,Dude-X/selenium,Ardesco/selenium,mojwang/selenium,gurayinan/selenium,valfirst/selenium,jsakamoto/selenium,chrisblock/selenium,tbeadle/selenium,TikhomirovSergey/selenium,5hawnknight/selenium,Jarob22/selenium,xmhubj/selenium,Tom-Trumper/selenium,joshuaduffy/selenium,Herst/selenium,xsyntrex/selenium,titusfortner/selenium,valfirst/selenium,asolntsev/selenium,oddui/selenium,Jarob22/selenium,jabbrwcky/selenium,kalyanjvn1/selenium,chrisblock/selenium,jsakamoto/selenium,lmtierney/selenium,xmhubj/selenium,kalyanjvn1/selenium,DrMarcII/selenium,Ardesco/selenium,lmtierney/selenium,bayandin/selenium,GorK-ChO/selenium,lmtierney/selenium,GorK-ChO/selenium,jsakamoto/selenium,jabbrwcky/selenium,xsyntrex/selenium,joshmgrant/selenium,asashour/selenium,xsyntrex/selenium,twalpole/selenium,joshbruning/selenium,markodolancic/selenium,uchida/selenium,dibagga/selenium,alb-i986/selenium,SeleniumHQ/selenium,alb-i986/selenium,sankha93/selenium,lmtierney/selenium,asashour/selenium,TikhomirovSergey/selenium,Dude-
X/selenium,gurayinan/selenium,jabbrwcky/selenium,oddui/selenium,oddui/selenium,SeleniumHQ/selenium,sankha93/selenium,bayandin/selenium,mach6/selenium,TikhomirovSergey/selenium,valfirst/selenium,SeleniumHQ/selenium,jabbrwcky/selenium,mach6/selenium,chrisblock/selenium,davehunt/selenium,twalpole/selenium,mojwang/selenium,lmtierney/selenium,juangj/selenium,SeleniumHQ/selenium,gurayinan/selenium,HtmlUnit/selenium,juangj/selenium,oddui/selenium,chrisblock/selenium,sankha93/selenium,krmahadevan/selenium,joshbruning/selenium,dibagga/selenium,jabbrwcky/selenium,joshuaduffy/selenium,alb-i986/selenium,alb-i986/selenium,joshuaduffy/selenium,GorK-ChO/selenium,alb-i986/selenium,tbeadle/selenium,chrisblock/selenium,joshmgrant/selenium,xsyntrex/selenium,twalpole/selenium,twalpole/selenium,bayandin/selenium,uchida/selenium,5hawnknight/selenium,twalpole/selenium,Tom-Trumper/selenium,Ardesco/selenium,Herst/selenium,oddui/selenium,juangj/selenium,TikhomirovSergey/selenium,titusfortner/selenium,kalyanjvn1/selenium,gurayinan/selenium,Tom-Trumper/selenium,GorK-ChO/selenium,Tom-Trumper/selenium,SeleniumHQ/selenium,bayandin/selenium,davehunt/selenium,xmhubj/selenium,TikhomirovSergey/selenium,jsakamoto/selenium,chrisblock/selenium,HtmlUnit/selenium,markodolancic/selenium,sag-enorman/selenium,Tom-Trumper/selenium,juangj/selenium,bayandin/selenium,joshbruning/selenium,TikhomirovSergey/selenium,DrMarcII/selenium,joshbruning/selenium,tbeadle/selenium,mojwang/selenium,xmhubj/selenium,Dude-X/selenium,joshmgrant/selenium,Jarob22/selenium,sankha93/selenium,jabbrwcky/selenium,mojwang/selenium,HtmlUnit/selenium,juangj/selenium,mojwang/selenium,Dude-X/selenium,jsakamoto/selenium,sag-enorman/selenium,joshmgrant/selenium,Jarob22/selenium,carlosroh/selenium,DrMarcII/selenium,GorK-ChO/selenium,dibagga/selenium,uchida/selenium,joshuaduffy/selenium,SeleniumHQ/selenium,chrisblock/selenium,joshuaduffy/selenium,sag-enorman/selenium,krmahadevan/selenium,carlosroh/selenium,Herst/selenium,xsyntrex/selenium,xmhubj
/selenium,titusfortner/selenium,kalyanjvn1/selenium,valfirst/selenium,sankha93/selenium,tbeadle/selenium,asashour/selenium,mach6/selenium,twalpole/selenium,bayandin/selenium,HtmlUnit/selenium,joshmgrant/selenium,sag-enorman/selenium,Ardesco/selenium,Ardesco/selenium,sag-enorman/selenium,oddui/selenium,Tom-Trumper/selenium,dibagga/selenium,markodolancic/selenium,asashour/selenium,xmhubj/selenium,alb-i986/selenium,Jarob22/selenium,titusfortner/selenium,jsakamoto/selenium,valfirst/selenium,joshmgrant/selenium,HtmlUnit/selenium,5hawnknight/selenium,davehunt/selenium,tbeadle/selenium,asolntsev/selenium,kalyanjvn1/selenium,HtmlUnit/selenium,uchida/selenium,gurayinan/selenium,titusfortner/selenium,5hawnknight/selenium,joshmgrant/selenium,joshmgrant/selenium,carlosroh/selenium,krmahadevan/selenium,jabbrwcky/selenium,chrisblock/selenium,lmtierney/selenium,alb-i986/selenium,lmtierney/selenium,jsakamoto/selenium,carlosroh/selenium,SeleniumHQ/selenium,joshmgrant/selenium,Herst/selenium,5hawnknight/selenium,twalpole/selenium,Tom-Trumper/selenium,kalyanjvn1/selenium,sankha93/selenium,uchida/selenium,Ardesco/selenium,HtmlUnit/selenium,HtmlUnit/selenium,GorK-ChO/selenium,asashour/selenium,asashour/selenium,xsyntrex/selenium,sag-enorman/selenium,Jarob22/selenium,Herst/selenium,xsyntrex/selenium,mach6/selenium,mojwang/selenium,kalyanjvn1/selenium,joshbruning/selenium,Herst/selenium,tbeadle/selenium,bayandin/selenium,asolntsev/selenium,SeleniumHQ/selenium,valfirst/selenium,5hawnknight/selenium,valfirst/selenium,markodolancic/selenium,joshbruning/selenium,davehunt/selenium,DrMarcII/selenium,davehunt/selenium,sag-enorman/selenium,Dude-X/selenium,lmtierney/selenium,bayandin/selenium,joshmgrant/selenium,SeleniumHQ/selenium,joshuaduffy/selenium,Jarob22/selenium,Tom-Trumper/selenium,asashour/selenium,titusfortner/selenium,davehunt/selenium,Jarob22/selenium,xsyntrex/selenium,uchida/selenium,asolntsev/selenium,Dude-X/selenium,valfirst/selenium,juangj/selenium,mach6/selenium,joshbruning/seleni
um,chrisblock/selenium,joshuaduffy/selenium,titusfortner/selenium,titusfortner/selenium,gurayinan/selenium,sankha93/selenium,sankha93/selenium,markodolancic/selenium,twalpole/selenium,asashour/selenium,juangj/selenium,5hawnknight/selenium,Ardesco/selenium,uchida/selenium,Jarob22/selenium,xmhubj/selenium,dibagga/selenium,tbeadle/selenium,krmahadevan/selenium,SeleniumHQ/selenium,krmahadevan/selenium,titusfortner/selenium,DrMarcII/selenium,asolntsev/selenium,dibagga/selenium,uchida/selenium,SeleniumHQ/selenium,valfirst/selenium,joshbruning/selenium,gurayinan/selenium,juangj/selenium,sag-enorman/selenium,jabbrwcky/selenium,DrMarcII/selenium,Dude-X/selenium,HtmlUnit/selenium,xmhubj/selenium,dibagga/selenium,asashour/selenium,mach6/selenium,xmhubj/selenium,gurayinan/selenium,Herst/selenium,xsyntrex/selenium,TikhomirovSergey/selenium,joshmgrant/selenium,TikhomirovSergey/selenium,juangj/selenium,Ardesco/selenium,joshbruning/selenium,valfirst/selenium,mach6/selenium,krmahadevan/selenium,markodolancic/selenium,alb-i986/selenium,uchida/selenium,DrMarcII/selenium,Dude-X/selenium,oddui/selenium,krmahadevan/selenium,markodolancic/selenium,asolntsev/selenium,dibagga/selenium,tbeadle/selenium,mach6/selenium,carlosroh/selenium,gurayinan/selenium | ---
+++
@@ -15,7 +15,5 @@
# specific language governing permissions and limitations
# under the License.
-from selenium import selenium
-
__version__ = "2.53.0" |
e0b2ce4b0287e8321cddde6c658a833dcf147974 | features.py | features.py | import numpy as np
def mean_energy(x_blocks):
return np.sqrt(np.mean(x_blocks**2, axis=1))
if __name__ == '__main__':
import matplotlib.pyplot as plt
from files import load_wav
from analysis import split_to_blocks
def analyze_mean_energy(file, block_size=1024):
x, fs = load_wav(file)
blocks, t = split_to_blocks(x, block_size)
y = mean_energy(blocks)
plt.semilogy(t, y)
plt.ylim(0, 1)
| import numpy as np
from numpy.linalg import norm
def mean_power(x_blocks):
return np.sqrt(np.mean(x_blocks**2, axis=-1))
def power(x_blocks):
return np.sqrt(np.sum(x_blocks**2, axis=-1))
def mean_energy(x_blocks):
return np.mean(x_blocks**2, axis=-1)
def energy(x_blocks):
return np.sum(x_blocks**2, axis=-1)
if __name__ == '__main__':
import matplotlib.pyplot as plt
from files import load_wav
from analysis import split_to_blocks
def analyze_mean_energy(file, block_size=1024):
x, fs = load_wav(file)
blocks, t = split_to_blocks(x, block_size)
y = mean_energy(blocks)
plt.semilogy(t, y)
plt.ylim(0, 1)
| Add computation on energy and power (mean and total). | Add computation on energy and power (mean and total).
| Python | mit | bzamecnik/tfr,bzamecnik/tfr | ---
+++
@@ -1,7 +1,17 @@
import numpy as np
+from numpy.linalg import norm
+
+def mean_power(x_blocks):
+ return np.sqrt(np.mean(x_blocks**2, axis=-1))
+
+def power(x_blocks):
+ return np.sqrt(np.sum(x_blocks**2, axis=-1))
def mean_energy(x_blocks):
- return np.sqrt(np.mean(x_blocks**2, axis=1))
+ return np.mean(x_blocks**2, axis=-1)
+
+def energy(x_blocks):
+ return np.sum(x_blocks**2, axis=-1)
if __name__ == '__main__':
@@ -12,6 +22,6 @@
def analyze_mean_energy(file, block_size=1024):
x, fs = load_wav(file)
blocks, t = split_to_blocks(x, block_size)
- y = mean_energy(blocks)
+ y = mean_energy(blocks)
plt.semilogy(t, y)
plt.ylim(0, 1) |
74a84d76492088f8d9df8d2712041381e57c29bb | dbaas/dbaas/settings_test.py | dbaas/dbaas/settings_test.py | from settings import * # noqa
import os
TEST_DISCOVER_ROOT = os.path.abspath(os.path.join(__file__, '../..'))
# Comment this line for turn on debug on tests
LOGGING = {}
DEBUG = 0
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--verbosity=0',
'--no-byte-compile',
'--debug-log=error_test.log',
# '-l',
'-s', # comment this line to use pdb
# '-x',
'--nologcapture',
# '--collect-only'
]
if CI: # noqa
NOSE_ARGS += [
'--with-coverage',
'--cover-package=application',
'--with-xunit',
'--xunit-file=test-report.xml',
'--cover-xml',
'--cover-xml-file=coverage.xml'
]
| from settings import * # noqa
import os
TEST_DISCOVER_ROOT = os.path.abspath(os.path.join(__file__, '../..'))
# Comment this line for turn on debug on tests
LOGGING = {}
DEBUG = 0
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--verbosity=2',
'--no-byte-compile',
'--debug-log=error_test.log',
# '-l',
'-s', # comment this line to use pdb
# '-x',
'--nologcapture',
# '--collect-only'
]
if CI: # noqa
NOSE_ARGS += [
'--with-coverage',
'--cover-package=application',
'--with-xunit',
'--xunit-file=test-report.xml',
'--cover-xml',
'--cover-xml-file=coverage.xml'
]
| Change verbosity of testes. To fix travis | Change verbosity of testes. To fix travis
| Python | bsd-3-clause | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | ---
+++
@@ -9,7 +9,7 @@
DEBUG = 0
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
- '--verbosity=0',
+ '--verbosity=2',
'--no-byte-compile',
'--debug-log=error_test.log',
# '-l', |
fc6aae454464aa31f1be401148645310ea9ee2b9 | cloud4rpi/errors.py | cloud4rpi/errors.py | # -*- coding: utf-8 -*-
import subprocess
TYPE_WARN_MSG = 'WARNING! A string "%s" passed to a numeric variable. ' \
'Change the variable type or the passed value.' \
class InvalidTokenError(Exception):
pass
class InvalidConfigError(TypeError):
pass
class UnexpectedVariableTypeError(TypeError):
pass
class UnexpectedVariableValueTypeError(TypeError):
pass
class MqttConnectionError(Exception):
def __init__(self, code):
super(MqttConnectionError, self).__init__()
self.code = code
class NotSupportedError(Exception):
pass
__messages = {
KeyboardInterrupt: 'Interrupted',
subprocess.CalledProcessError: 'Try run with sudo',
InvalidTokenError:
'Device token {0} is invalid. Please verify it.',
InvalidConfigError:
'Configuration is invalid. It must be an array.',
UnexpectedVariableTypeError:
('Unexpected type for the "{0}" variable. '
'It must be "bool", "numeric", "string" or "location".'),
UnexpectedVariableValueTypeError:
'Unexpected value type for variable: {0}'
}
def get_error_message(e):
return __messages.get(type(e), 'Unexpected error: {0}').format(e.message)
| # -*- coding: utf-8 -*-
import subprocess
TYPE_WARN_MSG = 'WARNING! A string "%s" passed to a numeric variable. ' \
'Change the variable type or the passed value.' \
class InvalidTokenError(Exception):
pass
class InvalidConfigError(TypeError):
pass
class UnexpectedVariableTypeError(TypeError):
pass
class UnexpectedVariableValueTypeError(TypeError):
pass
class MqttConnectionError(Exception):
def __init__(self, code):
super(MqttConnectionError, self).__init__()
self.code = code
class NotSupportedError(Exception):
pass
__messages = {
KeyboardInterrupt: 'Interrupted',
subprocess.CalledProcessError: 'Try run with sudo',
InvalidTokenError:
'Device token {0} is invalid. Please verify it.',
InvalidConfigError:
'Configuration is invalid. It must be an array.',
UnexpectedVariableTypeError:
('Unexpected type for the "{0}" variable. '
'It must be "bool", "numeric", "string" or "location".'),
UnexpectedVariableValueTypeError:
'Unexpected value type for variable: {0}'
}
def get_error_message(e):
return __messages.get(type(e), 'Unexpected error: {0}').format(str(type(e)) + str(e.args))
| Fix receiving an error message for python2 & 3 | Fix receiving an error message for python2 & 3
| Python | mit | cloud4rpi/cloud4rpi | ---
+++
@@ -49,4 +49,4 @@
def get_error_message(e):
- return __messages.get(type(e), 'Unexpected error: {0}').format(e.message)
+ return __messages.get(type(e), 'Unexpected error: {0}').format(str(type(e)) + str(e.args)) |
c17fed815cd062b37ebe5e6118da43afcf89db1f | relay_api/core/relay.py | relay_api/core/relay.py | import RPi.GPIO as GPIO
class relay():
def __init__(self, gpio_num, NC=False):
self.gpio_num = gpio_num
GPIO.setmode(GPIO.BCM)
try:
GPIO.input(self.gpio_num)
raise LookupError("Relay is already in use!")
except RuntimeError:
GPIO.setup(self.gpio_num, GPIO.OUT)
except ValueError:
raise LookupError("Relay number invalid!")
if NC:
self.on()
else:
self.off()
def on(self):
GPIO.output(self.gpio_num, GPIO.HIGH)
def off(self):
GPIO.output(self.gpio_num, GPIO.LOW)
def get_state(self):
return GPIO.input(self.gpio_num)
def cleanup(self):
GPIO.cleanup(self.gpio_num)
| import RPi.GPIO as GPIO
class relay():
def __init__(self, gpio_num, NC=False):
self.gpio = gpio_num
self.nc = NC
GPIO.setmode(GPIO.BCM)
try:
GPIO.input(self.gpio)
raise LookupError("Relay is already in use!")
except RuntimeError:
GPIO.setup(self.gpio, GPIO.OUT)
except ValueError:
raise LookupError("Relay number invalid!")
if self.nc:
self.on()
else:
self.off()
def on(self):
GPIO.output(self.gpio, GPIO.HIGH)
self.state = True
def off(self):
GPIO.output(self.gpio, GPIO.LOW)
self.state = False
def get_state(self):
return self.state
def cleanup(self):
GPIO.cleanup(self.gpio)
| Add nc and state as attributes. Change name of gpio_num to gpio | Add nc and state as attributes. Change name of gpio_num to gpio
| Python | mit | pahumadad/raspi-relay-api | ---
+++
@@ -3,28 +3,31 @@
class relay():
def __init__(self, gpio_num, NC=False):
- self.gpio_num = gpio_num
+ self.gpio = gpio_num
+ self.nc = NC
GPIO.setmode(GPIO.BCM)
try:
- GPIO.input(self.gpio_num)
+ GPIO.input(self.gpio)
raise LookupError("Relay is already in use!")
except RuntimeError:
- GPIO.setup(self.gpio_num, GPIO.OUT)
+ GPIO.setup(self.gpio, GPIO.OUT)
except ValueError:
raise LookupError("Relay number invalid!")
- if NC:
+ if self.nc:
self.on()
else:
self.off()
def on(self):
- GPIO.output(self.gpio_num, GPIO.HIGH)
+ GPIO.output(self.gpio, GPIO.HIGH)
+ self.state = True
def off(self):
- GPIO.output(self.gpio_num, GPIO.LOW)
+ GPIO.output(self.gpio, GPIO.LOW)
+ self.state = False
def get_state(self):
- return GPIO.input(self.gpio_num)
+ return self.state
def cleanup(self):
- GPIO.cleanup(self.gpio_num)
+ GPIO.cleanup(self.gpio) |
abff14b5804bf43bc2bffeac6418259580bdbae5 | makecard.py | makecard.py | #!/usr/bin/env python
import svgwrite
def main():
print 'test'
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import sys
import svgwrite
def main():
drawing = svgwrite.Drawing(size=('1000', '1400'))
img = svgwrite.image.Image('bullets/NYCS-bull-trans-1.svg',insert=(100, 100), size=(100,100))
drawing.add(img)
sys.stdout.write(drawing.tostring())
if __name__ == '__main__':
main()
| Include the first bullet svg | Include the first bullet svg
| Python | apache-2.0 | nanaze/xmascard | ---
+++
@@ -1,9 +1,17 @@
#!/usr/bin/env python
+import sys
import svgwrite
def main():
- print 'test'
+ drawing = svgwrite.Drawing(size=('1000', '1400'))
+
+ img = svgwrite.image.Image('bullets/NYCS-bull-trans-1.svg',insert=(100, 100), size=(100,100))
+
+ drawing.add(img)
+
+ sys.stdout.write(drawing.tostring())
+
if __name__ == '__main__':
main() |
d2699a9848652a17967ee5243055e811a7d0909b | lib/node_modules/@stdlib/math/base/special/logit/test/fixtures/python/runner.py | lib/node_modules/@stdlib/math/base/special/logit/test/fixtures/python/runner.py | #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(FILE)
def gen(x, name):
"""Generates fixture data and writes them to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(-1000, 1000, 2001);
python> gen(x, \"./data.json\");
```
"""
y = special.logit(x)
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
x = np.linspace(0.0001, 0.25, 500)
gen(x, "small.json")
x = np.linspace(0.25, 0.75, 500)
gen(x, "medium.json")
x = np.linspace(0.75, 0.9999, 500)
gen(x, "large.json")
if __name__ == "__main__":
main()
| #!/usr/bin/env python
"""Generate fixtures."""
import os
import json
import numpy as np
from scipy import special
# Get the file path:
FILE = os.path.realpath(__file__)
# Extract the directory in which this file resides:
DIR = os.path.dirname(FILE)
def gen(x, name):
"""Generates fixture data and writes them to file.
# Arguments
* `x`: domain
* `name::str`: output filename
# Examples
``` python
python> x = linspace(0.0, 1.0, 2001);
python> gen(x, \"./data.json\");
```
"""
y = special.logit(x)
# Store data to be written to file as a dictionary:
data = {
"x": x.tolist(),
"expected": y.tolist()
}
# Based on the script directory, create an output filepath:
filepath = os.path.join(DIR, name)
with open(filepath, 'w') as outfile:
json.dump(data, outfile)
def main():
"""Generate fixture data."""
x = np.linspace(0.0001, 0.25, 500)
gen(x, "small.json")
x = np.linspace(0.25, 0.75, 500)
gen(x, "medium.json")
x = np.linspace(0.75, 0.9999, 500)
gen(x, "large.json")
if __name__ == "__main__":
main()
| Fix range of values in example code | Fix range of values in example code
| Python | apache-2.0 | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | ---
+++
@@ -25,7 +25,7 @@
# Examples
``` python
- python> x = linspace(-1000, 1000, 2001);
+ python> x = linspace(0.0, 1.0, 2001);
python> gen(x, \"./data.json\");
```
""" |
b29e607d56ab07d07f4e33e2229a728cf0be1585 | usability/python-markdown/pymdpreprocessor.py | usability/python-markdown/pymdpreprocessor.py | """This preprocessor replaces Python code in markdowncell with the result
stored in cell metadata
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2014, Juergen Hasch
#
# Distributed under the terms of the Modified BSD License.
#
#-----------------------------------------------------------------------------
from IPython.nbconvert.preprocessors import *
import re
class PyMarkdownPreprocessor(Preprocessor):
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
replaced = re.sub("{{(.*?)}}", lambda m: variables[m.group(1)] , source)
except TypeError:
replaced = source
return replaced
def preprocess_cell(self, cell, resources, index):
"""
Preprocess cell
Parameters
----------
cell : NotebookNode cell
Notebook cell being processed
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
cell_index : int
Index of the cell being processed (see base.py)
"""
if cell.cell_type == "markdown":
if hasattr(cell['metadata'], 'variables'):
variables = cell['metadata']['variables']
if len(variables) > 0:
cell.source = self.replace_variables(cell.source, variables)
return cell, resources
| # -*- coding: utf-8 -*-
"""This preprocessor replaces Python code in markdowncell with the result
stored in cell metadata
"""
from nbconvert.preprocessors import *
import re
def get_variable( match, variables):
try:
x = variables[match]
return x
except KeyError:
return ""
class PyMarkdownPreprocessor(Preprocessor):
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
replaced = re.sub("{{(.*?)}}", lambda m: get_variable(m.group(1),variables) , source)
except TypeError:
replaced = source
return replaced
def preprocess_cell(self, cell, resources, index):
"""
Preprocess cell
Parameters
----------
cell : NotebookNode cell
Notebook cell being processed
resources : dictionary
Additional resources used in the conversion process. Allows
preprocessors to pass variables into the Jinja engine.
cell_index : int
Index of the cell being processed (see base.py)
"""
if cell.cell_type == "markdown":
if hasattr(cell['metadata'], 'variables'):
variables = cell['metadata']['variables']
if len(variables) > 0:
cell.source = self.replace_variables(cell.source, variables)
return cell, resources
| Update preprocessor for 4.x: New imports and make it more robust | Update preprocessor for 4.x: New imports and make it more robust
| Python | bsd-3-clause | jbn/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,Konubinix/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,motleytech/IPython-notebook-extensions,motleytech/IPython-notebook-extensions,Konubinix/IPython-notebook-extensions,motleytech/IPython-notebook-extensions,ipython-contrib/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,jbn/IPython-notebook-extensions,juhasch/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,jcb91/IPython-notebook-extensions,benvarkey/IPython-notebook-extensions,jbn/IPython-notebook-extensions,Konubinix/IPython-notebook-extensions,andyneff/IPython-notebook-extensions,motleytech/IPython-notebook-extensions,jbn/IPython-notebook-extensions,juhasch/IPython-notebook-extensions | ---
+++
@@ -1,25 +1,27 @@
+# -*- coding: utf-8 -*-
"""This preprocessor replaces Python code in markdowncell with the result
stored in cell metadata
"""
-#-----------------------------------------------------------------------------
-# Copyright (c) 2014, Juergen Hasch
-#
-# Distributed under the terms of the Modified BSD License.
-#
-#-----------------------------------------------------------------------------
-
-from IPython.nbconvert.preprocessors import *
+from nbconvert.preprocessors import *
import re
+def get_variable( match, variables):
+ try:
+ x = variables[match]
+ return x
+ except KeyError:
+ return ""
+
+
class PyMarkdownPreprocessor(Preprocessor):
-
+
def replace_variables(self,source,variables):
"""
Replace {{variablename}} with stored value
"""
try:
- replaced = re.sub("{{(.*?)}}", lambda m: variables[m.group(1)] , source)
+ replaced = re.sub("{{(.*?)}}", lambda m: get_variable(m.group(1),variables) , source)
except TypeError:
replaced = source
return replaced |
b323d60597038d4a7ac5698f704907ef5bd87489 | Utilities.py | Utilities.py | # Contains a lot of one-offs that aren't easy to deal with.
import logging
import Hand
def reset():
global overload, resources, combo, turn, turnOffset, us, them, numMinions
overload = 0
resources = '0' # Relevant for Wild Growth, which gives a card if at full.
combo = False # Relevant for Rogues, where Combo can change how cards work
turn = 0
turnOffset = 0 # Only tell the user what's happening before their turn
us = '0' # player id
them = '0' # player id
numMinions = 0
reset()
def ourTurn():
global turn, turnOffset
return (turn + 1*turnOffset)%2 == 0
def wentFirst(truth):
global turnOffset
if truth:
logging.info("You are going first")
Hand.hand = [Hand.card('Mulliganned') for _ in range(4)] + [Hand.card(-1, note='The Coin')]
turnOffset = 1
else:
logging.info("You are going second")
Hand.hand = [Hand.card('Mulliganned') for _ in range(3)]
turnOffset = 0
| # Contains a lot of one-offs that aren't easy to deal with.
import logging
import Hand
def reset():
global overload, resources, combo, turn, turnOffset, us, them, numMinions
overload = 0
resources = '0' # Relevant for Wild Growth, which gives a card if at full.
combo = False # Relevant for Rogues, where Combo can change how cards work
turn = 0
turnOffset = 0 # Only tell the user what's happening before their turn
us = '0' # player id
them = '0' # player id
numMinions = 0
reset()
def ourTurn():
global turn, turnOffset
return (turn + 1*turnOffset)%2 == 0
def wentFirst(truth):
global turnOffset
if truth:
logging.info("You are going first")
Hand.hand = [Hand.card('Mulliganned') for _ in range(4)] + [Hand.card('The Coin')]
turnOffset = 1
else:
logging.info("You are going second")
Hand.hand = [Hand.card('Mulliganned') for _ in range(3)]
turnOffset = 0
| Fix for a small bug | Fix for a small bug
| Python | apache-2.0 | jbzdarkid/HearthstonePro | ---
+++
@@ -22,7 +22,7 @@
global turnOffset
if truth:
logging.info("You are going first")
- Hand.hand = [Hand.card('Mulliganned') for _ in range(4)] + [Hand.card(-1, note='The Coin')]
+ Hand.hand = [Hand.card('Mulliganned') for _ in range(4)] + [Hand.card('The Coin')]
turnOffset = 1
else:
logging.info("You are going second") |
dbe7c01ed649abb1cbd8efe07a6633951cb1943e | tests/integration/states/test_handle_error.py | tests/integration/states/test_handle_error.py | # -*- coding: utf-8 -*-
'''
tests for host state
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
class HandleErrorTest(ModuleCase):
'''
Validate that ordering works correctly
'''
def test_handle_error(self):
'''
Test how an error can be recovered
'''
# without sync_states, the custom state may not be installed
# (resulting in :
# State salttest.hello found in sls issue-... is unavailable
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
self.assertTrue(
'An exception occurred in this state: Traceback'
in ret[[a for a in ret][0]]['comment'])
| # -*- coding: utf-8 -*-
'''
tests for host state
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals
# Import Salt Testing libs
from tests.support.case import ModuleCase
class HandleErrorTest(ModuleCase):
'''
Validate that ordering works correctly
'''
def test_function_do_not_return_dictionary_type(self):
'''
Handling a case when function returns anything but a dictionary type
'''
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
self.assertTrue('Data must be a dictionary type' in ret[[a for a in ret][0]]['comment'])
self.assertTrue(not ret[[a for a in ret][0]]['result'])
self.assertTrue(ret[[a for a in ret][0]]['changes'] == {})
| Update integration test: docs, add more checks, rename | Update integration test: docs, add more checks, rename
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | ---
+++
@@ -4,7 +4,7 @@
'''
# Import Python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
# Import Salt Testing libs
from tests.support.case import ModuleCase
@@ -14,14 +14,11 @@
'''
Validate that ordering works correctly
'''
- def test_handle_error(self):
+ def test_function_do_not_return_dictionary_type(self):
'''
- Test how an error can be recovered
+ Handling a case when function returns anything but a dictionary type
'''
- # without sync_states, the custom state may not be installed
- # (resulting in :
- # State salttest.hello found in sls issue-... is unavailable
ret = self.run_function('state.sls', ['issue-9983-handleerror'])
- self.assertTrue(
- 'An exception occurred in this state: Traceback'
- in ret[[a for a in ret][0]]['comment'])
+ self.assertTrue('Data must be a dictionary type' in ret[[a for a in ret][0]]['comment'])
+ self.assertTrue(not ret[[a for a in ret][0]]['result'])
+ self.assertTrue(ret[[a for a in ret][0]]['changes'] == {}) |
66165e490e785120b4ae3d96ec3fac3f7af69350 | sale_payment_method_automatic_workflow/__openerp__.py | sale_payment_method_automatic_workflow/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Reconcile',
'version': '1.0',
'author': ['Camptocamp', 'Akretion'],
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': [],
'test': [],
'installable': True,
'auto_install': False,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Reconcile',
'version': '1.0',
'author': ['Camptocamp', 'Akretion'],
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': [],
'test': [],
'installable': True,
'auto_install': True,
}
| Set the module as auto_install | Set the module as auto_install
So it installs when both sale_payment_method and sale_automatic_workflow are installed.
This module acts as the glue between them
| Python | agpl-3.0 | brain-tec/sale-workflow,akretion/sale-workflow,factorlibre/sale-workflow,open-synergy/sale-workflow,acsone/sale-workflow,thomaspaulb/sale-workflow,diagramsoftware/sale-workflow,Endika/sale-workflow,fevxie/sale-workflow,ddico/sale-workflow,acsone/sale-workflow,jabibi/sale-workflow,akretion/sale-workflow,brain-tec/sale-workflow,Antiun/sale-workflow,BT-cserra/sale-workflow,Eficent/sale-workflow | ---
+++
@@ -30,5 +30,5 @@
'data': [],
'test': [],
'installable': True,
- 'auto_install': False,
+ 'auto_install': True,
} |
14414263ef7578ec0c710e99de0f62c49319c6be | saw-remote-api/python/tests/saw/test_provers.py | saw-remote-api/python/tests/saw/test_provers.py | from cryptol import cryptoltypes
from cryptol.bitvector import BV
import saw
from saw.proofscript import *
import unittest
from pathlib import Path
def cry(exp):
return cryptoltypes.CryptolLiteral(exp)
class ProverTest(unittest.TestCase):
@classmethod
def setUpClass(self):
saw.connect(reset_server=True)
@classmethod
def tearDownClass(self):
saw.reset_server()
saw.disconnect()
def test_provers(self):
if __name__ == "__main__": saw.view(saw.LogResults())
simple_thm = cry('\(x:[8]) -> x != x+1')
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([yices([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([Admit()])).is_valid())
self.assertTrue(saw.prove(cry('True'), ProofScript([Trivial()])).is_valid())
simple_non_thm = cry('\(x:[8]) -> x != 5')
pr = saw.prove(simple_non_thm, ProofScript([z3([])]))
self.assertFalse(pr.is_valid())
cex = pr.get_counterexample()
self.assertEqual(cex, [('x', BV(8, 0x05))])
if __name__ == "__main__":
unittest.main()
| from cryptol import cryptoltypes
from cryptol.bitvector import BV
import saw
from saw.proofscript import *
import unittest
from pathlib import Path
def cry(exp):
return cryptoltypes.CryptolLiteral(exp)
class ProverTest(unittest.TestCase):
@classmethod
def setUpClass(self):
saw.connect(reset_server=True)
@classmethod
def tearDownClass(self):
saw.reset_server()
saw.disconnect()
def test_provers(self):
if __name__ == "__main__": saw.view(saw.LogResults())
simple_thm = cry('\(x:[8]) -> x != x+1')
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([Admit()])).is_valid())
self.assertTrue(saw.prove(cry('True'), ProofScript([Trivial()])).is_valid())
simple_non_thm = cry('\(x:[8]) -> x != 5')
pr = saw.prove(simple_non_thm, ProofScript([z3([])]))
self.assertFalse(pr.is_valid())
cex = pr.get_counterexample()
self.assertEqual(cex, [('x', BV(8, 0x05))])
if __name__ == "__main__":
unittest.main()
| Remove Yices test from RPC prover test | Remove Yices test from RPC prover test
| Python | bsd-3-clause | GaloisInc/saw-script,GaloisInc/saw-script,GaloisInc/saw-script,GaloisInc/saw-script,GaloisInc/saw-script | ---
+++
@@ -27,7 +27,6 @@
simple_thm = cry('\(x:[8]) -> x != x+1')
self.assertTrue(saw.prove(simple_thm, ProofScript([abc])).is_valid())
- self.assertTrue(saw.prove(simple_thm, ProofScript([yices([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([z3([])])).is_valid())
self.assertTrue(saw.prove(simple_thm, ProofScript([Admit()])).is_valid()) |
61b9a0e69b2db362e526bd3312e0e47609a42fad | scenarios/UAC/bob_cfg.py | scenarios/UAC/bob_cfg.py | from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
allargs = (('SHA-512-256', 'SHA-256', 'MD5', 'MD5-sess'), \
('SHA-512-256', 'SHA-256', 'SHA-256-sess', 'MD5'), \
('SHA-512-256', 'SHA-512-256-sess', 'SHA-256', 'MD5')) \
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = None
realm = 'VoIPTests.NET'
def __init__(self):
tlist = list(allalgs)
shuffle(tlist)
self.enalgs = tlist[0]
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
| from random import shuffle
from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
allalgs = (('SHA-512-256', 'SHA-256', 'MD5', 'MD5-sess'), \
('SHA-512-256', 'SHA-256', 'SHA-256-sess', 'MD5'), \
('SHA-512-256', 'SHA-512-256-sess', 'SHA-256', 'MD5')) \
class AUTH_CREDS(AUTH_CREDS_orig):
enalgs = None
realm = 'VoIPTests.NET'
def __init__(self):
tlist = list(allalgs)
shuffle(tlist)
self.enalgs = tlist[0]
AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
| Fix typo: allargs -> allalgs. Add missing import of shuffle(). | Fix typo: allargs -> allalgs. Add missing import of shuffle().
| Python | bsd-2-clause | sippy/voiptests,sippy/voiptests | ---
+++
@@ -1,6 +1,7 @@
+from random import shuffle
from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig
-allargs = (('SHA-512-256', 'SHA-256', 'MD5', 'MD5-sess'), \
+allalgs = (('SHA-512-256', 'SHA-256', 'MD5', 'MD5-sess'), \
('SHA-512-256', 'SHA-256', 'SHA-256-sess', 'MD5'), \
('SHA-512-256', 'SHA-512-256-sess', 'SHA-256', 'MD5')) \
|
56471d264671b652b4b40619f709dc6b8e02eac1 | dragonflow/db/models/host_route.py | dragonflow/db/models/host_route.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import dragonflow.db.field_types as df_fields
import dragonflow.db.model_framework as mf
@mf.construct_nb_db_model
class HostRoute(mf.ModelBase):
id = None
destination = df_fields.IpNetworkField(required=True)
nexthop = df_fields.IpAddressField(required=True)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from jsonmodels import models
import dragonflow.db.field_types as df_fields
class HostRoute(models.Base):
destination = df_fields.IpNetworkField(required=True)
nexthop = df_fields.IpAddressField(required=True)
| Change HostRoute to a plain model | Change HostRoute to a plain model
Since HostRoute doesn't have id, store it as a plain db model.
Change-Id: I3dbb9e5ffa42bf48f47b7010ee6baf470b55e85e
Partially-Implements: bp refactor-nb-api
| Python | apache-2.0 | openstack/dragonflow,openstack/dragonflow,openstack/dragonflow | ---
+++
@@ -10,12 +10,11 @@
# License for the specific language governing permissions and limitations
# under the License.
+from jsonmodels import models
+
import dragonflow.db.field_types as df_fields
-import dragonflow.db.model_framework as mf
-@mf.construct_nb_db_model
-class HostRoute(mf.ModelBase):
- id = None
+class HostRoute(models.Base):
destination = df_fields.IpNetworkField(required=True)
nexthop = df_fields.IpAddressField(required=True) |
2e1f4ffa667bcff2c10caf64be345f3e8619232f | python/simple_types.py | python/simple_types.py | # Many built-in types have built-in names
assert(type(5) == int)
assert(type(True) == bool)
assert(type(5.7) == float)
assert(type(9 + 5j) == complex)
assert(type((8, 'dog', False)) == tuple)
assert(type('hello') == str)
assert(type(b'hello') == bytes)
assert(type([1, '', False]) == list)
assert(type(range(1,10)) == range)
assert(type({1, 2, 3}) == set)
assert(type(frozenset([1, 2, 3])) == frozenset)
assert(type({'x': 1, 'y': 2}) == dict)
assert(type(slice([1, 2, 3])) == slice)
# Some do not, but we can still "see" them
assert(str(type(None)) == "<class 'NoneType'>")
assert(str(type(NotImplemented)) == "<class 'NotImplementedType'>")
# Built-in vs. User-defined functions
def plus_two(x):
return x + 2
assert(str(type(plus_two)) == "<class 'function'>")
assert(str(type(max)) == "<class 'builtin_function_or_method'>")
# Even modules are types!
import math
assert(str(type(math)) == "<class 'module'>")
| # Many built-in types have built-in names
assert(type(5) == int)
assert(type(True) == bool)
assert(type(5.7) == float)
assert(type(9 + 5j) == complex)
assert(type((8, 'dog', False)) == tuple)
assert(type('hello') == str)
assert(type(b'hello') == bytes)
assert(type([1, '', False]) == list)
assert(type(range(1,10)) == range)
assert(type({1, 2, 3}) == set)
assert(type(frozenset([1, 2, 3])) == frozenset)
assert(type({'x': 1, 'y': 2}) == dict)
assert(type(slice([1, 2, 3])) == slice)
# Some do not, but we can still "see" them
assert(str(type(None)) == "<class 'NoneType'>")
assert(str(type(NotImplemented)) == "<class 'NotImplementedType'>")
# Built-in vs. User-defined functions
def plus_two(x):
return x + 2
assert(str(type(plus_two)) == "<class 'function'>")
assert(str(type(max)) == "<class 'builtin_function_or_method'>")
# Even modules are types!
import math
assert(str(type(math)) == "<class 'module'>")
# Many built-in modules define their own types
from datetime import date
assert(type(date(1969,7,20)) == date) | Add example of date type in Python | Add example of date type in Python
| Python | mit | rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot | ---
+++
@@ -26,3 +26,7 @@
# Even modules are types!
import math
assert(str(type(math)) == "<class 'module'>")
+
+# Many built-in modules define their own types
+from datetime import date
+assert(type(date(1969,7,20)) == date) |
517a65e5fba0ec302a05ad550f473bd72a719398 | test/test_recipes.py | test/test_recipes.py | from __future__ import print_function, absolute_import
import json
import os
import sys
from imp import reload
from io import StringIO
import pytest
import yaml
from adr import query
from adr.main import run_recipe
class new_run_query(object):
def __init__(self, test):
self.test = test
def __call__(self, query, *args, **kwargs):
print(self.test['queries'].keys())
if query not in self.test['queries']:
pytest.fail("no test data found for query '{}' in '{}.test'".format(
query, self.test['recipe']))
for result in self.test['queries'][query]:
yield result
def test_recipe(monkeypatch, recipe_test):
monkeypatch.setattr(query, 'run_query', new_run_query(recipe_test))
module = 'adr.recipes.{}'.format(recipe_test['recipe'])
if module in sys.modules:
reload(sys.modules[module])
result = json.loads(run_recipe(recipe_test['recipe'], recipe_test['args'], fmt='json'))
buf = StringIO()
yaml.dump(result, buf)
print("Yaml formatted result for copy/paste:")
print(buf.getvalue())
assert result == recipe_test['expected']
| from __future__ import print_function, absolute_import
import json
import os
import sys
from imp import reload
from io import BytesIO, StringIO
import pytest
import yaml
from adr import query
from adr.main import run_recipe
class new_run_query(object):
def __init__(self, test):
self.test = test
def __call__(self, query, *args, **kwargs):
print(self.test['queries'].keys())
if query not in self.test['queries']:
pytest.fail("no test data found for query '{}' in '{}.test'".format(
query, self.test['recipe']))
for result in self.test['queries'][query]:
yield result
def test_recipe(monkeypatch, recipe_test):
monkeypatch.setattr(query, 'run_query', new_run_query(recipe_test))
module = 'adr.recipes.{}'.format(recipe_test['recipe'])
if module in sys.modules:
reload(sys.modules[module])
result = json.loads(run_recipe(recipe_test['recipe'], recipe_test['args'], fmt='json'))
if sys.version_info > (3, 0):
buf = StringIO()
else:
buf = BytesIO()
yaml.dump(result, buf)
print("Yaml formatted result for copy/paste:")
print(buf.getvalue())
assert result == recipe_test['expected']
| Fix python 2 error when dumping expected test results | Fix python 2 error when dumping expected test results
| Python | mpl-2.0 | ahal/active-data-recipes,ahal/active-data-recipes | ---
+++
@@ -4,7 +4,7 @@
import os
import sys
from imp import reload
-from io import StringIO
+from io import BytesIO, StringIO
import pytest
import yaml
@@ -36,7 +36,10 @@
result = json.loads(run_recipe(recipe_test['recipe'], recipe_test['args'], fmt='json'))
- buf = StringIO()
+ if sys.version_info > (3, 0):
+ buf = StringIO()
+ else:
+ buf = BytesIO()
yaml.dump(result, buf)
print("Yaml formatted result for copy/paste:")
print(buf.getvalue()) |
5d21ad5ae63addac0892242fe774250a2934fc87 | awx/lib/metrics.py | awx/lib/metrics.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
from functools import wraps
from django_statsd.clients import statsd
logger = logging.getLogger(__name__)
def task_timer(fn):
@wraps(fn)
def __wrapped__(self, *args, **kwargs):
statsd.incr('tasks.{}.{}.count'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__))
with statsd.timer('tasks.{}.{}.timer'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__)):
return fn(self, *args, **kwargs)
return __wrapped__
class BaseTimer(object):
def __init__(self, name, prefix=None):
self.name = name.rsplit('.', 1)[-1]
if prefix:
self.name = '{}.{}'.format(prefix, self.name)
def __call__(self, fn):
@wraps(fn)
def __wrapped__(obj, *args, **kwargs):
statsd.incr('{}.{}.count'.format(
self.name,
fn.__name__
))
with statsd.timer('{}.{}.timer'.format(
self.name,
fn.__name__
)):
return fn(obj, *args, **kwargs)
return __wrapped__
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
import logging
from functools import wraps
from django_statsd.clients import statsd
logger = logging.getLogger(__name__)
def task_timer(fn):
@wraps(fn)
def __wrapped__(self, *args, **kwargs):
statsd.incr('tasks.{0}.{1}.count'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__))
with statsd.timer('tasks.{0}.{1}.timer'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__)):
return fn(self, *args, **kwargs)
return __wrapped__
class BaseTimer(object):
def __init__(self, name, prefix=None):
self.name = name.rsplit('.', 1)[-1]
if prefix:
self.name = '{0}.{1}'.format(prefix, self.name)
def __call__(self, fn):
@wraps(fn)
def __wrapped__(obj, *args, **kwargs):
statsd.incr('{0}.{1}.count'.format(
self.name,
fn.__name__
))
with statsd.timer('{0}.{1}.timer'.format(
self.name,
fn.__name__
)):
return fn(obj, *args, **kwargs)
return __wrapped__
| Fix up statsd work to support python 2.6 | Fix up statsd work to support python 2.6
Format specifiers must include field specifier
| Python | apache-2.0 | snahelou/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx | ---
+++
@@ -11,10 +11,10 @@
def task_timer(fn):
@wraps(fn)
def __wrapped__(self, *args, **kwargs):
- statsd.incr('tasks.{}.{}.count'.format(
+ statsd.incr('tasks.{0}.{1}.count'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__))
- with statsd.timer('tasks.{}.{}.timer'.format(
+ with statsd.timer('tasks.{0}.{1}.timer'.format(
self.name.rsplit('.', 1)[-1],
fn.__name__)):
return fn(self, *args, **kwargs)
@@ -25,16 +25,16 @@
def __init__(self, name, prefix=None):
self.name = name.rsplit('.', 1)[-1]
if prefix:
- self.name = '{}.{}'.format(prefix, self.name)
+ self.name = '{0}.{1}'.format(prefix, self.name)
def __call__(self, fn):
@wraps(fn)
def __wrapped__(obj, *args, **kwargs):
- statsd.incr('{}.{}.count'.format(
+ statsd.incr('{0}.{1}.count'.format(
self.name,
fn.__name__
))
- with statsd.timer('{}.{}.timer'.format(
+ with statsd.timer('{0}.{1}.timer'.format(
self.name,
fn.__name__
)): |
50c877733f052ce7235ee193d5c8ba88a266df60 | app/sense.py | app/sense.py | import threading
import time
class SensorThread(object):
def __init__(self, notify, delay=0):
self.notify = notify
self.delay = delay
self.interval = 1
self.distance = -1
def start(self, robot):
self.robot = robot
thread = threading.Thread(target=self.run, args=())
thread.daemon = True # Daemonize thread
thread.start() # Start the execution
def run(self):
while True:
distance = self.robot.distance()
if not self.distance == distance:
self.notify.emit('sense', distance)
self.distance = distance
print "distance %scm" % distance
time.sleep(self.interval)
| import threading
import time
class SensorThread(object):
def __init__(self, notify, delay=0):
self.notify = notify
self.delay = delay
self.interval = 1
self.distance = -1
def start(self, robot):
self.robot = robot
thread = threading.Thread(target=self.run, args=())
thread.daemon = True # Daemonize thread
thread.start() # Start the execution
def run(self):
while True:
distance = int(self.robot.distance())
if not self.distance == distance:
self.notify.emit('sense', distance)
self.distance = distance
print "distance %scm" % distance
time.sleep(self.interval)
| Use integer distances to avoid so many events. | Use integer distances to avoid so many events.
| Python | bsd-2-clause | legorovers/legoflask,legorovers/legoflask,legorovers/legoflask | ---
+++
@@ -17,7 +17,7 @@
def run(self):
while True:
- distance = self.robot.distance()
+ distance = int(self.robot.distance())
if not self.distance == distance:
self.notify.emit('sense', distance)
self.distance = distance |
432323eaf442db41d6486841168c159732d8dfe0 | bhgcal/__init__.py | bhgcal/__init__.py | from __future__ import unicode_literals
import os
from datetime import datetime
from dateutil.relativedelta import relativedelta
import requests
login_url = 'http://bukkesprangetnatur.barnehage.no/LogOn'
ics_url = ('http://bukkesprangetnatur.barnehage.no/Ukeplan/'
'PlanMonthAsICalendar/61?year={year}&month={month}')
def main():
user, password = os.environ['USER'], os.environ['PASSWORD']
session = requests.Session()
response = session.post(
login_url, data={'UserName': user, 'Password': password})
response.raise_for_status()
now = datetime.now()
head = session.get(
ics_url.format(year=now.year, month=now.month)).text
tail = session.get(
ics_url.format(
year=now.year, month=(
now + relativedelta(month=1)).month)).text
# Stitch together the two ics files
print(('\n'.join(head.split('\n')[:-3])
+ '\n'.join(tail.split('\n')[3:])).encode('utf-8'))
| from __future__ import unicode_literals
from datetime import datetime
import os
import sys
from dateutil.relativedelta import relativedelta
import requests
bases = {'r\xf8sslyngen': 59,
'myrulla': 61}
login_url = 'http://bukkesprangetnatur.barnehage.no/LogOn'
ics_url = ('http://bukkesprangetnatur.barnehage.no/Ukeplan/'
'PlanMonthAsICalendar/{base}?year={year}&month={month}')
def stdin_decode(data):
if sys.stdin.encoding is not None:
return data.decode(sys.stdin.encoding)
else:
# Just assume we're modern if nothing else is specified
return data.decode('utf-8')
def stdout_encode(data):
if sys.stdout.encoding is not None:
return data.encode(sys.stdout.encoding)
else:
# Just assume we're modern if nothing else is specified
return data.encode('utf-8')
def main():
base, user, password = (
stdin_decode(os.environ['BASE']), os.environ['USER'],
os.environ['PASSWORD'])
session = requests.Session()
response = session.post(
login_url, data={'UserName': user, 'Password': password})
response.raise_for_status()
now = datetime.now()
head = (
session.get(ics_url.format(
base=bases[base], year=now.year, month=now.month))
.content.decode('utf-8'))
tail = (
session.get(
ics_url.format(
base=bases[base], year=now.year, month=(
now + relativedelta(month=1)).month))
.content.decode('utf-8'))
# Stitch together the two ics files
print(stdout_encode('\n'.join(head.split('\n')[:-3])
+ '\n'.join(tail.split('\n')[3:])))
| Support iterating over the different bases | Support iterating over the different bases
And fix the encoding issue that suddenly became obvious.
| Python | mit | asmundg/bhgcal | ---
+++
@@ -1,30 +1,56 @@
from __future__ import unicode_literals
+from datetime import datetime
import os
-from datetime import datetime
+import sys
from dateutil.relativedelta import relativedelta
import requests
+bases = {'r\xf8sslyngen': 59,
+ 'myrulla': 61}
+
login_url = 'http://bukkesprangetnatur.barnehage.no/LogOn'
ics_url = ('http://bukkesprangetnatur.barnehage.no/Ukeplan/'
- 'PlanMonthAsICalendar/61?year={year}&month={month}')
+ 'PlanMonthAsICalendar/{base}?year={year}&month={month}')
+
+
+def stdin_decode(data):
+ if sys.stdin.encoding is not None:
+ return data.decode(sys.stdin.encoding)
+ else:
+ # Just assume we're modern if nothing else is specified
+ return data.decode('utf-8')
+
+
+def stdout_encode(data):
+ if sys.stdout.encoding is not None:
+ return data.encode(sys.stdout.encoding)
+ else:
+ # Just assume we're modern if nothing else is specified
+ return data.encode('utf-8')
def main():
- user, password = os.environ['USER'], os.environ['PASSWORD']
+ base, user, password = (
+ stdin_decode(os.environ['BASE']), os.environ['USER'],
+ os.environ['PASSWORD'])
session = requests.Session()
response = session.post(
login_url, data={'UserName': user, 'Password': password})
response.raise_for_status()
now = datetime.now()
- head = session.get(
- ics_url.format(year=now.year, month=now.month)).text
- tail = session.get(
- ics_url.format(
- year=now.year, month=(
- now + relativedelta(month=1)).month)).text
+ head = (
+ session.get(ics_url.format(
+ base=bases[base], year=now.year, month=now.month))
+ .content.decode('utf-8'))
+ tail = (
+ session.get(
+ ics_url.format(
+ base=bases[base], year=now.year, month=(
+ now + relativedelta(month=1)).month))
+ .content.decode('utf-8'))
# Stitch together the two ics files
- print(('\n'.join(head.split('\n')[:-3])
- + '\n'.join(tail.split('\n')[3:])).encode('utf-8'))
+ print(stdout_encode('\n'.join(head.split('\n')[:-3])
+ + '\n'.join(tail.split('\n')[3:]))) |
eb1568e9baf3d60a8d1e3ea59c49d54dc7b34437 | tests/test_pgbackup.py | tests/test_pgbackup.py | # coding: utf-8
"""
Unit tests for essential functions in postgresql backup.
"""
from unittest.mock import MagicMock, mock_open, patch
import pytest
import smdba.postgresqlgate
class TestPgBackup:
"""
Test suite for postgresql backup.
"""
@patch("smdba.postgresqlgate.os.path.exists", MagicMock(return_value=False))
def test_init_pkbackup_checks_archivecleaup(self):
"""
Test constructor of pkgbackup pg_archivecleanup installed
:return:
"""
with pytest.raises(Exception) as exc:
smdba.postgresqlgate.PgBackup("/target")
assert "The utility pg_archivecleanup was not found on the path." in str(exc)
| # coding: utf-8
"""
Unit tests for essential functions in postgresql backup.
"""
from unittest.mock import MagicMock, mock_open, patch
import pytest
import smdba.postgresqlgate
class TestPgBackup:
"""
Test suite for postgresql backup.
"""
@patch("smdba.postgresqlgate.os.path.exists", MagicMock(return_value=False))
def test_init_pgbackup_checks_archivecleaup(self):
"""
Test constructor of pgbackup pg_archivecleanup installed
:return:
"""
with pytest.raises(Exception) as exc:
smdba.postgresqlgate.PgBackup("/target")
assert "The utility pg_archivecleanup was not found on the path." in str(exc)
@patch("smdba.postgresqlgate.os.path.exists", MagicMock(return_value=True))
def test_init_pgbackup_sets_pgdata_path(self):
"""
Test constructor of pgbackup for pg_data is set correctly.
:return:
"""
target = "/some/target"
pg_data = "/opt/pg_data"
pgbk = smdba.postgresqlgate.PgBackup(target_path=target, pg_data=pg_data)
assert pgbk.target_path == target
assert pgbk.pg_data == pg_data
pgbk = smdba.postgresqlgate.PgBackup(target_path=target)
assert pgbk.pg_data == pgbk.DEFAULT_PG_DATA
| Add test for constructor of pgbackup for pg_data is set correctly. | Add test for constructor of pgbackup for pg_data is set correctly.
| Python | mit | SUSE/smdba,SUSE/smdba | ---
+++
@@ -12,9 +12,9 @@
Test suite for postgresql backup.
"""
@patch("smdba.postgresqlgate.os.path.exists", MagicMock(return_value=False))
- def test_init_pkbackup_checks_archivecleaup(self):
+ def test_init_pgbackup_checks_archivecleaup(self):
"""
- Test constructor of pkgbackup pg_archivecleanup installed
+ Test constructor of pgbackup pg_archivecleanup installed
:return:
"""
@@ -22,3 +22,20 @@
with pytest.raises(Exception) as exc:
smdba.postgresqlgate.PgBackup("/target")
assert "The utility pg_archivecleanup was not found on the path." in str(exc)
+
+ @patch("smdba.postgresqlgate.os.path.exists", MagicMock(return_value=True))
+ def test_init_pgbackup_sets_pgdata_path(self):
+ """
+ Test constructor of pgbackup for pg_data is set correctly.
+
+ :return:
+ """
+ target = "/some/target"
+ pg_data = "/opt/pg_data"
+ pgbk = smdba.postgresqlgate.PgBackup(target_path=target, pg_data=pg_data)
+
+ assert pgbk.target_path == target
+ assert pgbk.pg_data == pg_data
+
+ pgbk = smdba.postgresqlgate.PgBackup(target_path=target)
+ assert pgbk.pg_data == pgbk.DEFAULT_PG_DATA |
643c60364266c9015b919a39ff8f0807e6138efc | fileupload/views.py | fileupload/views.py | from fileupload.models import Picture
from django.views.generic import CreateView, DeleteView
from django.http import HttpResponse
from django.utils import simplejson
from django.core.urlresolvers import reverse
from django.conf import settings
class PictureCreateView(CreateView):
model = Picture
def form_valid(self, form):
self.object = form.save()
f = self.request.FILES.get('file')
data = [{'name': f.name, 'url': settings.MEDIA_URL + "pictures/" + f.name, 'thumbnail_url': settings.MEDIA_URL + "pictures/" + f.name, 'delete_url': reverse('upload-delete', args=[f.name]), 'delete_type': "DELETE"}]
return JSONResponse(data)
class PictureDeleteView(DeleteView):
model = Picture
def delete(self, request, *args, **kwargs):
self.object = self.get_object()
self.object.delete()
return JSONResponse(True)
class JSONResponse(HttpResponse):
""" JSON response class """
def __init__(self,obj='',json_opts={},mimetype="application/json",*args,**kwargs):
content = simplejson.dumps(obj,**json_opts)
super(JSONResponse,self).__init__(content,mimetype,*args,**kwargs)
| from fileupload.models import Picture
from django.views.generic import CreateView, DeleteView
from django.http import HttpResponse
from django.utils import simplejson
from django.core.urlresolvers import reverse
from django.conf import settings
class PictureCreateView(CreateView):
model = Picture
def form_valid(self, form):
self.object = form.save()
f = self.request.FILES.get('file')
data = [{'name': f.name, 'url': settings.MEDIA_URL + "pictures/" + f.name, 'thumbnail_url': settings.MEDIA_URL + "pictures/" + f.name, 'delete_url': reverse('upload-delete', args=[f.name]), 'delete_type': "DELETE"}]
return JSONResponse(data)
class PictureDeleteView(DeleteView):
model = Picture
def delete(self, request, *args, **kwargs):
self.object = self.get_object()
self.object.delete()
return JSONResponse(True)
class JSONResponse(HttpResponse):
"""JSON response class. This does not help browsers not liking application/json."""
def __init__(self,obj='',json_opts={},mimetype="application/json",*args,**kwargs):
content = simplejson.dumps(obj,**json_opts)
super(JSONResponse,self).__init__(content,mimetype,*args,**kwargs)
| Add comment about browsers not liking application/json. | Add comment about browsers not liking application/json.
| Python | mit | Imaginashion/cloud-vision,extremoburo/django-jquery-file-upload,minhlongdo/django-jquery-file-upload,extremoburo/django-jquery-file-upload,Imaginashion/cloud-vision,Imaginashion/cloud-vision,extremoburo/django-jquery-file-upload,vaniakov/django-jquery-file-upload,vaniakov/django-jquery-file-upload,madteckhead/django-jquery-file-upload,sigurdga/django-jquery-file-upload,indrajithi/mgc-django,sigurdga/django-jquery-file-upload,sigurdga/django-jquery-file-upload,Imaginashion/cloud-vision,minhlongdo/django-jquery-file-upload,vaniakov/django-jquery-file-upload,madteckhead/django-jquery-file-upload,Imaginashion/cloud-vision,indrajithi/mgc-django,Imaginashion/cloud-vision,minhlongdo/django-jquery-file-upload | ---
+++
@@ -25,7 +25,7 @@
return JSONResponse(True)
class JSONResponse(HttpResponse):
- """ JSON response class """
+ """JSON response class. This does not help browsers not liking application/json."""
def __init__(self,obj='',json_opts={},mimetype="application/json",*args,**kwargs):
content = simplejson.dumps(obj,**json_opts)
super(JSONResponse,self).__init__(content,mimetype,*args,**kwargs) |
371f67f290d19021b488057780292ae1009bca9b | fresque/__init__.py | fresque/__init__.py | from __future__ import absolute_import, unicode_literals, print_function
import os
from flask import Flask
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.script import Manager
from flask.ext.sqlalchemy import SQLAlchemy
from pkg_resources import resource_filename
app = Flask(__name__)
app.config.from_object('fresque.default_config')
if 'FRESQUE_CONFIG' in os.environ: # pragma: no cover
app.config.from_envvar('FRESQUE_CONFIG')
from fresque import database
db = SQLAlchemy(app)
Migrate(app, db, directory=resource_filename("fresque", "migrations"))
manager = Manager(app)
manager.add_command('db', MigrateCommand)
from fresque import views
#from fresque import views, models
if __name__ == '__main__':
manager.run()
| # -*- coding: utf-8 -*-
'''
Top level of the fresque application.
'''
from __future__ import absolute_import, unicode_literals, print_function
import logging
import logging.handlers
import os
import sys
import urlparse
import flask
from flask.ext.fas_openid import FAS
APP = flask.Flask(__name__)
APP.config.from_object('fresque.default_config')
if 'FRESQUE_CONFIG' in os.environ: # pragma: no cover
APP.config.from_envvar('FRESQUE_CONFIG')
# Set up FAS extension
FAS = FAS(APP)
# TODO: Add email handler (except on debug mode)
# Log to stderr as well
STDERR_LOG = logging.StreamHandler(sys.stderr)
STDERR_LOG.setLevel(logging.INFO)
APP.logger.addHandler(STDERR_LOG)
LOG = APP.logger
import fresque.lib
import fresque.proxy
APP.wsgi_app = fresque.proxy.ReverseProxied(APP.wsgi_app)
SESSION = fresque.lib.create_session(APP.config['SQLALCHEMY_DATABASE_URI'])
def is_authenticated():
""" Returns wether a user is authenticated or not.
"""
return hasattr(flask.g, 'fas_user') and flask.g.fas_user is not None
def is_safe_url(target):
""" Checks that the target url is safe and sending to the current
website not some other malicious one.
"""
ref_url = urlparse.urlparse(flask.request.host_url)
test_url = urlparse.urlparse(
urlparse.urljoin(flask.request.host_url, target))
return test_url.scheme in ('http', 'https') and \
ref_url.netloc == test_url.netloc
from fresque import views
| Rework a little bit the top level module of the application | Rework a little bit the top level module of the application
Start working on the logging
Add couple of utility functions (as is_authenticated)
Fix case of the APP variable, to be pep8
Make the application working behind a reverse proxy
Make the application use the FAS plugin for authentication
| Python | agpl-3.0 | whitel/fresque,rahulrrixe/fresque,vivekanand1101/fresque,rahulrrixe/fresque,vivekanand1101/fresque,whitel/fresque,rahulrrixe/fresque,whitel/fresque,fedora-infra/fresque,fedora-infra/fresque,fedora-infra/fresque,fedora-infra/fresque,vivekanand1101/fresque,rahulrrixe/fresque,vivekanand1101/fresque,whitel/fresque | ---
+++
@@ -1,31 +1,62 @@
+# -*- coding: utf-8 -*-
+
+'''
+Top level of the fresque application.
+'''
+
from __future__ import absolute_import, unicode_literals, print_function
+import logging
+import logging.handlers
import os
+import sys
+import urlparse
-from flask import Flask
-from flask.ext.migrate import Migrate, MigrateCommand
-from flask.ext.script import Manager
-from flask.ext.sqlalchemy import SQLAlchemy
-from pkg_resources import resource_filename
+import flask
+from flask.ext.fas_openid import FAS
+
+APP = flask.Flask(__name__)
+APP.config.from_object('fresque.default_config')
+if 'FRESQUE_CONFIG' in os.environ: # pragma: no cover
+ APP.config.from_envvar('FRESQUE_CONFIG')
-app = Flask(__name__)
-app.config.from_object('fresque.default_config')
-if 'FRESQUE_CONFIG' in os.environ: # pragma: no cover
- app.config.from_envvar('FRESQUE_CONFIG')
-
-from fresque import database
-
-db = SQLAlchemy(app)
-Migrate(app, db, directory=resource_filename("fresque", "migrations"))
-manager = Manager(app)
-manager.add_command('db', MigrateCommand)
+# Set up FAS extension
+FAS = FAS(APP)
-from fresque import views
-#from fresque import views, models
+# TODO: Add email handler (except on debug mode)
+
+# Log to stderr as well
+STDERR_LOG = logging.StreamHandler(sys.stderr)
+STDERR_LOG.setLevel(logging.INFO)
+APP.logger.addHandler(STDERR_LOG)
+
+LOG = APP.logger
+
+import fresque.lib
+import fresque.proxy
+APP.wsgi_app = fresque.proxy.ReverseProxied(APP.wsgi_app)
+SESSION = fresque.lib.create_session(APP.config['SQLALCHEMY_DATABASE_URI'])
-if __name__ == '__main__':
- manager.run()
+
+def is_authenticated():
+ """ Returns wether a user is authenticated or not.
+ """
+ return hasattr(flask.g, 'fas_user') and flask.g.fas_user is not None
+
+
+def is_safe_url(target):
+ """ Checks that the target url is safe and sending to the current
+ website not some other malicious one.
+ """
+ ref_url = urlparse.urlparse(flask.request.host_url)
+ test_url = urlparse.urlparse(
+ urlparse.urljoin(flask.request.host_url, target))
+ return test_url.scheme in ('http', 'https') and \
+ ref_url.netloc == test_url.netloc
+
+from fresque import views
+ |
6672a0634265e09366a9274d3c2a04afca49cf02 | dirtree_filter.py | dirtree_filter.py | class DirTreeFilter(object):
def __init__(self, show_hidden=False, show_files=True, show_dirs=True):
self.show_hidden = show_hidden
self.show_files = show_files
self.show_dirs = show_dirs
self.hidden_exts = [".pyc", ".pyo", ".o", ".a", ".obj", ".lib", ".swp", "~"]
self.hidden_dirs = ["CVS", "__pycache__"]
def __call__(self, info):
if info.hidden and not self.show_hidden:
return False
if info.is_file and not self.show_files:
return False
elif info.is_dir:
if not self.show_dirs:
return False
if info.filename in self.hidden_dirs:
return False
for ext in self.hidden_exts:
if info.filename.endswith(ext):
return False
if info.filename.startswith(".#"):
return False
return True
| import re
def compile_file_patterns(patterns):
return re.compile("$%s^" % "|".join("(%s)" % re.escape(p).replace("\\*", ".*") for p in patterns))
hidden_files = [".*", "*~", "*.swp", "*.pyc", "*.pyo", "*.o", "*.a", "*.obj", "*.lib", "*.class"]
hidden_dirs = ["CVS", "__pycache__"]
class DirTreeFilter(object):
def __init__(self, show_hidden=False, show_files=True, show_dirs=True,
hidden_files=hidden_files, hidden_dirs=hidden_dirs):
self.show_hidden = show_hidden
self.show_files = show_files
self.show_dirs = show_dirs
self.r_hidden_file = compile_file_patterns(hidden_files)
self.r_hidden_dir = compile_file_patterns(hidden_dirs)
def __call__(self, info):
if info.hidden and not self.show_hidden:
return False
if info.is_file and not self.show_files:
return False
if info.is_dir:
if not self.show_dirs:
return False
if self.r_hidden_dir.match(info.filename):
return False
else:
if self.r_hidden_file.match(info.filename):
return False
return True
| Use file patterns compiled to regular expressions to match hidden files. | Use file patterns compiled to regular expressions to match hidden files.
| Python | mit | shaurz/devo | ---
+++
@@ -1,24 +1,31 @@
+import re
+
+def compile_file_patterns(patterns):
+ return re.compile("$%s^" % "|".join("(%s)" % re.escape(p).replace("\\*", ".*") for p in patterns))
+
+hidden_files = [".*", "*~", "*.swp", "*.pyc", "*.pyo", "*.o", "*.a", "*.obj", "*.lib", "*.class"]
+hidden_dirs = ["CVS", "__pycache__"]
+
class DirTreeFilter(object):
- def __init__(self, show_hidden=False, show_files=True, show_dirs=True):
+ def __init__(self, show_hidden=False, show_files=True, show_dirs=True,
+ hidden_files=hidden_files, hidden_dirs=hidden_dirs):
self.show_hidden = show_hidden
self.show_files = show_files
self.show_dirs = show_dirs
- self.hidden_exts = [".pyc", ".pyo", ".o", ".a", ".obj", ".lib", ".swp", "~"]
- self.hidden_dirs = ["CVS", "__pycache__"]
+ self.r_hidden_file = compile_file_patterns(hidden_files)
+ self.r_hidden_dir = compile_file_patterns(hidden_dirs)
def __call__(self, info):
if info.hidden and not self.show_hidden:
return False
if info.is_file and not self.show_files:
return False
- elif info.is_dir:
+ if info.is_dir:
if not self.show_dirs:
return False
- if info.filename in self.hidden_dirs:
+ if self.r_hidden_dir.match(info.filename):
return False
- for ext in self.hidden_exts:
- if info.filename.endswith(ext):
+ else:
+ if self.r_hidden_file.match(info.filename):
return False
- if info.filename.startswith(".#"):
- return False
return True |
5b215758adab39923399db98b5975fc76d389472 | __init__.py | __init__.py | # -*- coding: utf-8 -*-
import configparser
import optparse
from blo import Blo
if __name__ == '__main__':
parser = optparse.OptionParser("usage: %prog [option] markdown_file.md")
parser.add_option("-c", "--config", dest="config_file",
default="./blo.cfg", type="string", help="specify configuration file path to run on")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
cfg_file = options.config_file
B = Blo()
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
| # -*- coding: utf-8 -*-
import optparse
from blo import Blo
if __name__ == '__main__':
parser = optparse.OptionParser("usage: %prog [options] markdown_file.md")
parser.add_option("-c", "--config", dest="config_file",
default="./blo.cfg", type="string", help="specify configuration file path to run on")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
cfg_file = options.config_file
blo_main = Blo(cfg_file)
blo_main.insert_article(args[0])
print('%s complete process.'%('blo',))
| Implement main section of blo package. | Implement main section of blo package.
| Python | mit | 10nin/blo,10nin/blo | ---
+++
@@ -1,10 +1,9 @@
# -*- coding: utf-8 -*-
-import configparser
import optparse
from blo import Blo
if __name__ == '__main__':
- parser = optparse.OptionParser("usage: %prog [option] markdown_file.md")
+ parser = optparse.OptionParser("usage: %prog [options] markdown_file.md")
parser.add_option("-c", "--config", dest="config_file",
default="./blo.cfg", type="string", help="specify configuration file path to run on")
(options, args) = parser.parse_args()
@@ -12,14 +11,7 @@
parser.error("incorrect number of arguments")
cfg_file = options.config_file
- B = Blo()
+ blo_main = Blo(cfg_file)
+ blo_main.insert_article(args[0])
- # TODO: implement main routine of Blo.
- # blo [-c config_file] markdown_file.md
- # -- if no -c option then load config file from default path (current directory).
- # ---- if no configuration file on current directory blo said error.
- # 1. init database (database name from environment variable or configuration file)
- # 2. parse markdown file from command line argument.
- # -- if command line argument path is directory then it will do recursive in directory.
- # 3. generate html and commit to database
- pass
+ print('%s complete process.'%('blo',)) |
cf61b61b6908940c465bec2bfc4575ec0f657c72 | apps/externalsites/urls.py | apps/externalsites/urls.py | # Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from django.conf.urls import patterns, url
urlpatterns = patterns('externalsites.views',
url(r'^resync/(?P<video_url_id>\d+)/(?P<language_code>\w+)/$', 'resync', name='resync'),
)
| # Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from django.conf.urls import patterns, url
urlpatterns = patterns('externalsites.views',
url(r'^resync/(?P<video_url_id>\d+)/(?P<language_code>[\w-]+)/$', 'resync', name='resync'),
)
| Allow "-" chars in the resync view | Allow "-" chars in the resync view
| Python | agpl-3.0 | wevoice/wesub,ReachingOut/unisubs,norayr/unisubs,ujdhesa/unisubs,pculture/unisubs,pculture/unisubs,pculture/unisubs,wevoice/wesub,ReachingOut/unisubs,ujdhesa/unisubs,ReachingOut/unisubs,norayr/unisubs,ReachingOut/unisubs,eloquence/unisubs,ofer43211/unisubs,ujdhesa/unisubs,pculture/unisubs,ofer43211/unisubs,ofer43211/unisubs,eloquence/unisubs,wevoice/wesub,eloquence/unisubs,norayr/unisubs,wevoice/wesub,eloquence/unisubs,ofer43211/unisubs,ujdhesa/unisubs,norayr/unisubs | ---
+++
@@ -19,5 +19,5 @@
from django.conf.urls import patterns, url
urlpatterns = patterns('externalsites.views',
- url(r'^resync/(?P<video_url_id>\d+)/(?P<language_code>\w+)/$', 'resync', name='resync'),
+ url(r'^resync/(?P<video_url_id>\d+)/(?P<language_code>[\w-]+)/$', 'resync', name='resync'),
) |
c3bb6f173478419662aa96b2378fa459c0a4ed6a | pystruct/datasets/dataset_loaders.py | pystruct/datasets/dataset_loaders.py | import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
module_path = dirname(__file__)
data_file = open(join(module_path, 'letters.pickle'))
data = cPickle.load(data_file)
# we add an easy to use image representation:
data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
for word in data['data']]
return data
def load_scene():
module_path = dirname(__file__)
data_file = open(join(module_path, 'scene.pickle'))
return cPickle.load(data_file)
def load_snakes():
module_path = dirname(__file__)
data_file = open(join(module_path, 'snakes.pickle'))
return cPickle.load(data_file)
| import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
module_path = dirname(__file__)
data_file = open(join(module_path, 'letters.pickle'),'rb')
data = cPickle.load(data_file)
# we add an easy to use image representation:
data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
for word in data['data']]
return data
def load_scene():
module_path = dirname(__file__)
data_file = open(join(module_path, 'scene.pickle'))
return cPickle.load(data_file)
def load_snakes():
module_path = dirname(__file__)
data_file = open(join(module_path, 'snakes.pickle'))
return cPickle.load(data_file)
| FIX sample data load for Windows | FIX sample data load for Windows
| Python | bsd-2-clause | massmutual/pystruct,massmutual/pystruct,amueller/pystruct,wattlebird/pystruct,pystruct/pystruct,d-mittal/pystruct,pystruct/pystruct,amueller/pystruct,wattlebird/pystruct,d-mittal/pystruct | ---
+++
@@ -14,7 +14,7 @@
as it was a capital letter (in contrast to all other letters).
"""
module_path = dirname(__file__)
- data_file = open(join(module_path, 'letters.pickle'))
+ data_file = open(join(module_path, 'letters.pickle'),'rb')
data = cPickle.load(data_file)
# we add an easy to use image representation:
data['images'] = [np.hstack([l.reshape(16, 8) for l in word]) |
357fc83908fe09da2c69be78afdae9f1cf5c4b0d | hjlog/forms/post.py | hjlog/forms/post.py | from flask_wtf import Form
from wtforms import TextAreaField, StringField, SelectField, BooleanField
from wtforms.validators import InputRequired, Optional, Length
class PostForm(Form):
title = StringField('제목', validators=[InputRequired(), Length(max=120)])
body = TextAreaField('내용', validators=[InputRequired()])
private = BooleanField('이 글을 비공개로 작성합니다', default=True)
tags = StringField('글갈피', validators=[Length(max=100)])
category = SelectField('분류', validators=[Optional()],
choices=[('everyday', 'EVERYDAY'),
('study', 'STUDY'),
('idea', 'IDEA'),
('world', 'WORLD')])
| from flask_wtf import Form
from wtforms import TextAreaField, StringField, SelectField, BooleanField
from wtforms.validators import InputRequired, Optional, Length
class PostForm(Form):
title = StringField('제목', validators=[InputRequired(), Length(max=120)])
body = TextAreaField('내용', validators=[InputRequired()])
private = BooleanField('비공개 설정', default=True)
tags = StringField('글갈피', validators=[Length(max=100)])
category = SelectField('분류', validators=[Optional()],
choices=[('everyday', 'EVERYDAY'),
('study', 'STUDY'),
('idea', 'IDEA'),
('world', 'WORLD')])
| Change label text of 'private' Field | Change label text of 'private' Field
| Python | mit | heejongahn/hjlog,heejongahn/hjlog,heejongahn/hjlog,heejongahn/hjlog | ---
+++
@@ -5,7 +5,7 @@
class PostForm(Form):
title = StringField('제목', validators=[InputRequired(), Length(max=120)])
body = TextAreaField('내용', validators=[InputRequired()])
- private = BooleanField('이 글을 비공개로 작성합니다', default=True)
+ private = BooleanField('비공개 설정', default=True)
tags = StringField('글갈피', validators=[Length(max=100)])
category = SelectField('분류', validators=[Optional()],
choices=[('everyday', 'EVERYDAY'), |
b3839c72a831589dd707b38ae2088fd4b304faa1 | django_filters/rest_framework/filterset.py | django_filters/rest_framework/filterset.py |
from __future__ import absolute_import
from copy import deepcopy
from django.db import models
from django import forms
from django.utils.translation import ugettext_lazy as _
from django_filters import filterset
from .filters import BooleanFilter, IsoDateTimeFilter
from .. import compat, utils
FILTER_FOR_DBFIELD_DEFAULTS = deepcopy(filterset.FILTER_FOR_DBFIELD_DEFAULTS)
FILTER_FOR_DBFIELD_DEFAULTS.update({
models.DateTimeField: {'filter_class': IsoDateTimeFilter},
models.BooleanField: {'filter_class': BooleanFilter},
})
class FilterSet(filterset.FilterSet):
FILTER_DEFAULTS = FILTER_FOR_DBFIELD_DEFAULTS
def __init__(self, *args, **kwargs):
super(FilterSet, self).__init__(*args, **kwargs)
if compat.is_crispy():
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit
layout_components = list(self.form.fields.keys()) + [
Submit('', _('Submit'), css_class='btn-default'),
]
helper = FormHelper()
helper.form_method = 'GET'
helper.template_pack = 'bootstrap3'
helper.layout = Layout(*layout_components)
self.form.helper = helper
@property
def qs(self):
from rest_framework.exceptions import ValidationError
try:
return super(FilterSet, self).qs
except forms.ValidationError as e:
raise ValidationError(utils.raw_validation(e))
|
from __future__ import absolute_import
from copy import deepcopy
from django.db import models
from django import forms
from django.utils.translation import ugettext_lazy as _
from django_filters import filterset
from .filters import BooleanFilter, IsoDateTimeFilter
from .. import compat, utils
FILTER_FOR_DBFIELD_DEFAULTS = deepcopy(filterset.FILTER_FOR_DBFIELD_DEFAULTS)
FILTER_FOR_DBFIELD_DEFAULTS.update({
models.DateTimeField: {'filter_class': IsoDateTimeFilter},
models.BooleanField: {'filter_class': BooleanFilter},
})
class FilterSet(filterset.FilterSet):
FILTER_DEFAULTS = FILTER_FOR_DBFIELD_DEFAULTS
@property
def form(self):
form = super(FilterSet, self).form
if compat.is_crispy():
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit
layout_components = list(form.fields.keys()) + [
Submit('', _('Submit'), css_class='btn-default'),
]
helper = FormHelper()
helper.form_method = 'GET'
helper.template_pack = 'bootstrap3'
helper.layout = Layout(*layout_components)
form.helper = helper
return form
@property
def qs(self):
from rest_framework.exceptions import ValidationError
try:
return super(FilterSet, self).qs
except forms.ValidationError as e:
raise ValidationError(utils.raw_validation(e))
| Move crispy helper to '.form' property | Move crispy helper to '.form' property
| Python | bsd-3-clause | alex/django-filter,alex/django-filter | ---
+++
@@ -21,14 +21,15 @@
class FilterSet(filterset.FilterSet):
FILTER_DEFAULTS = FILTER_FOR_DBFIELD_DEFAULTS
- def __init__(self, *args, **kwargs):
- super(FilterSet, self).__init__(*args, **kwargs)
+ @property
+ def form(self):
+ form = super(FilterSet, self).form
if compat.is_crispy():
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit
- layout_components = list(self.form.fields.keys()) + [
+ layout_components = list(form.fields.keys()) + [
Submit('', _('Submit'), css_class='btn-default'),
]
helper = FormHelper()
@@ -36,7 +37,9 @@
helper.template_pack = 'bootstrap3'
helper.layout = Layout(*layout_components)
- self.form.helper = helper
+ form.helper = helper
+
+ return form
@property
def qs(self): |
550106fbff26c16cdf2269dc0778814c05ed1e3b | nap/apps.py | nap/apps.py |
from django.apps import AppConfig
from django.utils.module_loading import autodiscover_modules
class NapConfig(AppConfig):
'''App Config that performs auto-discover on ready.'''
def ready(self):
super(NapConfig, self).ready()
autodiscover_modules('publishers')
|
from django.apps import AppConfig
from django.utils.module_loading import autodiscover_modules
class NapConfig(AppConfig):
'''App Config that performs auto-discover on ready.'''
name = 'nap'
def ready(self):
super(NapConfig, self).ready()
autodiscover_modules('publishers')
| Fix to include mandatory name attribute | Fix to include mandatory name attribute | Python | bsd-3-clause | MarkusH/django-nap,limbera/django-nap | ---
+++
@@ -6,6 +6,8 @@
class NapConfig(AppConfig):
'''App Config that performs auto-discover on ready.'''
+
+ name = 'nap'
def ready(self):
super(NapConfig, self).ready() |
14398ec42c0d31d577278d8748b0617650f91775 | porick/controllers/create.py | porick/controllers/create.py | import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
import porick.lib.helpers as h
from porick.lib.auth import authorize
from porick.lib.base import BaseController, render
from porick.lib.create import create_quote, create_user
log = logging.getLogger(__name__)
class CreateController(BaseController):
def quote(self):
authorize()
c.page = 'new quote'
if request.environ['REQUEST_METHOD'] == 'GET':
return render('/create/form.mako')
elif request.environ['REQUEST_METHOD'] == 'POST':
quote_body = request.params.get('quote_body', '')
if not quote_body:
abort(400)
notes = request.params.get('notes', '')
tags = request.params.get('tags', '').split(' ')
result = create_quote(quote_body, notes, tags)
if result:
return render('/create/success.mako')
else:
abort(500)
else:
abort(400)
| import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
import porick.lib.helpers as h
from porick.lib.auth import authorize
from porick.lib.base import BaseController, render
from porick.lib.create import create_quote, create_user
log = logging.getLogger(__name__)
class CreateController(BaseController):
def quote(self):
authorize()
c.page = 'new quote'
if request.environ['REQUEST_METHOD'] == 'GET':
return render('/create/form.mako')
elif request.environ['REQUEST_METHOD'] == 'POST':
quote_body = request.params.get('quote_body', '')
if not quote_body:
abort(400)
notes = request.params.get('notes', '')
tags = filter(None, request.params.get('tags', '').replace(',', ' ').split(' '))
result = create_quote(quote_body, notes, tags)
if result:
return render('/create/success.mako')
else:
abort(500)
else:
abort(400)
| Deal with comma-separated tags lists HNGH | Deal with comma-separated tags lists HNGH
| Python | apache-2.0 | kopf/porick,kopf/porick,kopf/porick | ---
+++
@@ -23,7 +23,8 @@
if not quote_body:
abort(400)
notes = request.params.get('notes', '')
- tags = request.params.get('tags', '').split(' ')
+ tags = filter(None, request.params.get('tags', '').replace(',', ' ').split(' '))
+
result = create_quote(quote_body, notes, tags)
if result: |
99bd91cac200f9e83ee710ac8758fd20ac1febfa | examples/find_facial_features_in_picture.py | examples/find_facial_features_in_picture.py | from PIL import Image, ImageDraw
import face_recognition
# Load the jpg file into a numpy array
image = face_recognition.load_image_file("biden.jpg")
# Find all facial features in all the faces in the image
face_landmarks_list = face_recognition.face_landmarks(image)
print("I found {} face(s) in this photograph.".format(len(face_landmarks_list)))
for face_landmarks in face_landmarks_list:
# Print the location of each facial feature in this image
for facial_feature in face_landmarks.keys():
print("The {} in this face has the following points: {}".format(facial_feature, face_landmarks[facial_feature]))
# Let's trace out each facial feature in the image with a line!
pil_image = Image.fromarray(image)
d = ImageDraw.Draw(pil_image)
for facial_feature in face_landmarks.keys():
d.line(face_landmarks[facial_feature], width=5)
pil_image.show()
| from PIL import Image, ImageDraw
import face_recognition
# Load the jpg file into a numpy array
image = face_recognition.load_image_file("two_people.jpg")
# Find all facial features in all the faces in the image
face_landmarks_list = face_recognition.face_landmarks(image)
print("I found {} face(s) in this photograph.".format(len(face_landmarks_list)))
# Create a PIL imagedraw object so we can draw on the picture
pil_image = Image.fromarray(image)
d = ImageDraw.Draw(pil_image)
for face_landmarks in face_landmarks_list:
# Print the location of each facial feature in this image
for facial_feature in face_landmarks.keys():
print("The {} in this face has the following points: {}".format(facial_feature, face_landmarks[facial_feature]))
# Let's trace out each facial feature in the image with a line!
for facial_feature in face_landmarks.keys():
d.line(face_landmarks[facial_feature], width=5)
# Show the picture
pil_image.show()
| Tweak demo to show multiple faces in one window instead of separate windows | Tweak demo to show multiple faces in one window instead of separate windows
| Python | mit | ageitgey/face_recognition | ---
+++
@@ -2,12 +2,16 @@
import face_recognition
# Load the jpg file into a numpy array
-image = face_recognition.load_image_file("biden.jpg")
+image = face_recognition.load_image_file("two_people.jpg")
# Find all facial features in all the faces in the image
face_landmarks_list = face_recognition.face_landmarks(image)
print("I found {} face(s) in this photograph.".format(len(face_landmarks_list)))
+
+# Create a PIL imagedraw object so we can draw on the picture
+pil_image = Image.fromarray(image)
+d = ImageDraw.Draw(pil_image)
for face_landmarks in face_landmarks_list:
@@ -16,10 +20,8 @@
print("The {} in this face has the following points: {}".format(facial_feature, face_landmarks[facial_feature]))
# Let's trace out each facial feature in the image with a line!
- pil_image = Image.fromarray(image)
- d = ImageDraw.Draw(pil_image)
-
for facial_feature in face_landmarks.keys():
d.line(face_landmarks[facial_feature], width=5)
- pil_image.show()
+# Show the picture
+pil_image.show() |
5b2e154fe28a32eb128c9c1060c1954eb1664c3f | child_sync_typo3/wizard/child_depart_wizard.py | child_sync_typo3/wizard/child_depart_wizard.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class child_depart_wizard(orm.TransientModel):
_inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
| Correct wrong inheritance on sponsorship_typo3 child_depart wizard. | Correct wrong inheritance on sponsorship_typo3 child_depart wizard.
| Python | agpl-3.0 | eicher31/compassion-switzerland,ecino/compassion-switzerland,Secheron/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,ndtran/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,Secheron/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,MickSandoz/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,ecino/compassion-switzerland | ---
+++
@@ -12,8 +12,8 @@
from ..model.sync_typo3 import Sync_typo3
-class end_sponsorship_wizard(orm.TransientModel):
- _inherit = 'end.sponsorship.wizard'
+class child_depart_wizard(orm.TransientModel):
+ _inherit = 'child.depart.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
@@ -23,7 +23,7 @@
if child.state == 'I':
res = child.child_remove_from_typo3()
- res = super(end_sponsorship_wizard, self).child_depart(
+ res = super(child_depart_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context) |
5293a24bc2ab6a3aa1c9fc98d857c79548509356 | explanatory_style.py | explanatory_style.py | import gate
class EventAttributionUnit:
"""event, attribution must be gate.Annotation objects
"""
def __init__(self, event, attribution):
self._event = event
self._attribution = attribution
for annotation in [self._event, self._attribution]:
# if type(anntotation) != "Annotation":
if not isinstance(annotation, gate.Annotation):
raise TypeError("Not a gate.Annotation object!")
def get_event(self):
return self._event
def get_attribution(self):
return self._attribution
def get_event_attribution_units(events, attributions):
return [
EventAttributionUnit( event, attribution )
for attribution in attributions
for event in events
if event._id == attribution._caused_event_id
]
# def CoPos():
# def CoNeg():
| import gate
class EventAttributionUnit:
def __init__(self, event, attribution):
"""event, attribution must be gate.Annotation objects
"""
self._event = event
self._attribution = attribution
for annotation in [self._event, self._attribution]:
if not isinstance(annotation, gate.Annotation):
raise TypeError("Not a gate.Annotation object!")
def get_event(self):
return self._event
def get_attribution(self):
return self._attribution
def get_event_attribution_units(events,
attributions):
"""Given an iterable of events and one of attributions, return a list of
EventAttributionUnit objects
"""
return [
EventAttributionUnit(
attribution.get_caused_event(events),
attribution
)
for attribution in attributions
]
# def CoPos():
# def CoNeg():
if __name__ == "__main__":
test_file = "/home/nick/hilt/pes/conversations/16/4-MG-2014-06-02_PES_3_consensus.xml"
annotation_file = gate.AnnotationFile(test_file)
text_with_nodes = annotation_file._text_with_nodes
raw_events = []
raw_attributions = []
annotations = annotation_file.iter_annotations()
for annotation in annotations:
if "event" in annotation._type.lower():
raw_events.append(annotation)
elif "attribution" in annotation._type.lower():
raw_attributions.append(annotation)
events = gate.concatenate_annotations(raw_events)
attributions = gate.concatenate_annotations(raw_attributions)
event_attribution_units = get_event_attribution_units(
events,
attributions
)
for x in event_attribution_units:
print(
x.get_event().get_concatenated_text(text_with_nodes, " "),
x.get_attribution().get_concatenated_text(text_with_nodes, " ")
)
| Add __main__ program for running on files | Add __main__ program for running on files
| Python | mit | nickwbarber/HILT-annotations | ---
+++
@@ -2,13 +2,12 @@
class EventAttributionUnit:
- """event, attribution must be gate.Annotation objects
- """
def __init__(self, event, attribution):
+ """event, attribution must be gate.Annotation objects
+ """
self._event = event
self._attribution = attribution
for annotation in [self._event, self._attribution]:
- # if type(anntotation) != "Annotation":
if not isinstance(annotation, gate.Annotation):
raise TypeError("Not a gate.Annotation object!")
@@ -18,13 +17,49 @@
def get_attribution(self):
return self._attribution
-def get_event_attribution_units(events, attributions):
+def get_event_attribution_units(events,
+ attributions):
+ """Given an iterable of events and one of attributions, return a list of
+ EventAttributionUnit objects
+ """
return [
- EventAttributionUnit( event, attribution )
+ EventAttributionUnit(
+ attribution.get_caused_event(events),
+ attribution
+ )
for attribution in attributions
- for event in events
- if event._id == attribution._caused_event_id
]
# def CoPos():
# def CoNeg():
+
+if __name__ == "__main__":
+
+ test_file = "/home/nick/hilt/pes/conversations/16/4-MG-2014-06-02_PES_3_consensus.xml"
+
+
+ annotation_file = gate.AnnotationFile(test_file)
+ text_with_nodes = annotation_file._text_with_nodes
+
+ raw_events = []
+ raw_attributions = []
+ annotations = annotation_file.iter_annotations()
+ for annotation in annotations:
+ if "event" in annotation._type.lower():
+ raw_events.append(annotation)
+ elif "attribution" in annotation._type.lower():
+ raw_attributions.append(annotation)
+
+ events = gate.concatenate_annotations(raw_events)
+ attributions = gate.concatenate_annotations(raw_attributions)
+
+ event_attribution_units = get_event_attribution_units(
+ events,
+ attributions
+ )
+
+ for x in event_attribution_units:
+ print(
+ x.get_event().get_concatenated_text(text_with_nodes, " "),
+ x.get_attribution().get_concatenated_text(text_with_nodes, " ")
+ ) |
b1196e347129e79bd616cc572714982be6739d3c | indra/pipeline/decorators.py | indra/pipeline/decorators.py | pipeline_functions = {}
def register_pipeline(function):
if function.__name__ in pipeline_functions:
raise ExistingFunctionError(
'%s is already registered with %s.%s' % (
function.__name__, function.__module__, function.__name__))
pipeline_functions[function.__name__] = function
return function
class ExistingFunctionError(Exception):
pass
| pipeline_functions = {}
def register_pipeline(function):
"""Decorator to register a function for the assembly pipeline."""
if function.__name__ in pipeline_functions:
raise ExistingFunctionError(
'%s is already registered with %s.%s' % (
function.__name__, function.__module__, function.__name__))
pipeline_functions[function.__name__] = function
return function
class ExistingFunctionError(Exception):
pass
| Add minimal docstring for decorator | Add minimal docstring for decorator
| Python | bsd-2-clause | johnbachman/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,johnbachman/belpy,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,bgyori/indra,sorgerlab/belpy,bgyori/indra,johnbachman/indra | ---
+++
@@ -2,6 +2,7 @@
def register_pipeline(function):
+ """Decorator to register a function for the assembly pipeline."""
if function.__name__ in pipeline_functions:
raise ExistingFunctionError(
'%s is already registered with %s.%s' % ( |
b69bf4dd6e9c1d8b9133c2a8f2b18ac8d41f3145 | src/streaming-programs/car-average-speeds.py | src/streaming-programs/car-average-speeds.py | #!/usr/bin/python
import sys
import json
# Count average speeds for links
def main(locationdata_dictionary_file):
locationdata = {}
with open(locationdata_dictionary_file, "r") as dictionary_file:
locationdata = json.load(dictionary_file)
for input_line in sys.stdin:
data = json.loads(input_line)
for recognition in data['recognitions']:
try:
link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next()
average_speed = (link_data['dist'] / recognition['tt']) * 3.6
print "LongValueSum:" + str(recognition['id']) + "_speedsum\t" + str(int(average_speed))
print "LongValueSum:" + str(recognition['id']) + "_speedcount\t1"
except:
pass
if __name__ == "__main__":
main(sys.argv[1])
| #!/usr/bin/python
import sys
import json
# Count average speeds for links
def main(locationdata_dictionary_file):
locationdata = {}
with open(locationdata_dictionary_file, "r") as dictionary_file:
locationdata = json.load(dictionary_file)
for input_line in sys.stdin:
data = json.loads(input_line)
for recognition in data['recognitions']:
try:
link_data = (item for item in locationdata['links'] if item['id'] == recognition['id']).next()
average_speed = (link_data['dist'] / recognition['tt']) * 3.6
print "LongValueSum:" + str(recognition['id']) + "_speedsum\t" + str(int(average_speed))
print "LongValueSum:" + str(recognition['id']) + "_speedcount\t1"
except:
pass
if __name__ == "__main__":
if len(sys.argv) > 1:
main(sys.argv[1])
else:
main("locationdata.json")
| Use default dictionary when no arg passed | Use default dictionary when no arg passed
| Python | mit | gofore/aws-emr,gofore/aws-emr,gofore/aws-emr,gofore/aws-emr | ---
+++
@@ -23,4 +23,8 @@
pass
if __name__ == "__main__":
- main(sys.argv[1])
+
+ if len(sys.argv) > 1:
+ main(sys.argv[1])
+ else:
+ main("locationdata.json") |
6ee4cd2ace969365a4898e3f89944e8ddbdca1c8 | wolme/wallet/models.py | wolme/wallet/models.py | from __future__ import unicode_literals
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext as _
@python_2_unicode_compatible
class Tag(models.Model):
slug = models.SlugField(unique=True)
description = models.TextField(null=True, blank=True)
def __str__(self):
return self.slug
@python_2_unicode_compatible
class Wallet(models.Model):
CURRENCIES = (
("EUR", "EUR"),
)
owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='wallets')
label = models.CharField(max_length=100)
description = models.TextField(null=True, blank=True)
currency = models.CharField(max_length=3, null=False, blank=False, choices=CURRENCIES)
def __str__(self):
return "{} ({})".format(self.label, self.currency)
@python_2_unicode_compatible
class Movement(models.Model):
wallet = models.ForeignKey(Wallet, related_name="movements")
date = models.DateTimeField()
amount = models.DecimalField(max_digits=11, decimal_places=2)
tags = models.ManyToManyField(Tag, related_name="movements")
def __str__(self):
return "{} - {:.2f} for {} on {}".format(
self.type, self.amount, self.wallet, self.date)
| from __future__ import unicode_literals
from django.conf import settings
from django.db import models
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext as _
@python_2_unicode_compatible
class Tag(models.Model):
slug = models.SlugField(unique=True)
description = models.TextField(null=True, blank=True)
def __str__(self):
return self.slug
@python_2_unicode_compatible
class Wallet(models.Model):
CURRENCIES = (
("EUR", "EUR"),
)
owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='wallets')
label = models.CharField(max_length=100)
description = models.TextField(null=True, blank=True)
currency = models.CharField(max_length=3, null=False, blank=False, choices=CURRENCIES)
def __str__(self):
return "{} ({})".format(self.label, self.currency)
@python_2_unicode_compatible
class Movement(models.Model):
wallet = models.ForeignKey(Wallet, related_name="movements")
date = models.DateTimeField(default=timezone.now())
amount = models.DecimalField(max_digits=11, decimal_places=2)
tags = models.ManyToManyField(Tag, related_name="movements")
def __str__(self):
return "{} - {:.2f} for {} on {}".format(
self.type, self.amount, self.wallet, self.date)
| Add default to movement date | Add default to movement date
| Python | bsd-2-clause | synasius/wolme | ---
+++
@@ -2,6 +2,7 @@
from django.conf import settings
from django.db import models
+from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext as _
@@ -32,7 +33,7 @@
@python_2_unicode_compatible
class Movement(models.Model):
wallet = models.ForeignKey(Wallet, related_name="movements")
- date = models.DateTimeField()
+ date = models.DateTimeField(default=timezone.now())
amount = models.DecimalField(max_digits=11, decimal_places=2)
tags = models.ManyToManyField(Tag, related_name="movements") |
cbc60512f0f29ba3444573b6fd835e1505e5e35c | radar/radar/validation/fetal_anomaly_scans.py | radar/radar/validation/fetal_anomaly_scans.py | from radar.validation.data_sources import DataSourceValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
date_of_scan = Field([required()])
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
right_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
left_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
left_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
| from radar.validation.data_sources import DataSourceValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length, not_in_future
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
date_of_scan = Field([required(), not_in_future()])
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
right_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
left_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)])
left_ultrasound_details = Field([none_if_blank(), optional(), max_length(1000)])
| Check date of scan is not in future | Check date of scan is not in future
| Python | agpl-3.0 | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | ---
+++
@@ -2,11 +2,11 @@
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
-from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length
+from radar.validation.validators import required, optional, min_, max_, none_if_blank, max_length, not_in_future
class FetalAnomalyScanValidation(PatientValidationMixin, DataSourceValidationMixin, MetaValidationMixin, Validation):
- date_of_scan = Field([required()])
+ date_of_scan = Field([required(), not_in_future()])
gestational_age = Field([required(), min_(8 * 7, 'days'), max_(45 * 7, 'days')])
oligohydramnios = Field([optional()])
right_anomaly_details = Field([none_if_blank(), optional(), max_length(1000)]) |
6ef190887b38df4f5212a8a7017e002051734c9f | lokar/bib.py | lokar/bib.py | # coding=utf-8
from __future__ import unicode_literals
from .marc import Record
from .util import etree, parse_xml, show_diff
class Bib(object):
""" An Alma Bib record """
def __init__(self, alma, xml):
self.alma = alma
self.orig_xml = xml.encode('utf-8')
self.init(xml)
def init(self, xml):
self.doc = parse_xml(xml)
self.mms_id = self.doc.findtext('mms_id')
self.marc_record = Record(self.doc.find('record'))
self.linked_to_cz = self.doc.findtext('linked_record_id[@type="CZ"]') or None
def save(self, diff=False):
# Save record back to Alma
post_data = etree.tostring(self.doc, encoding='UTF-8')
if diff:
show_diff(self.orig_xml, post_data)
response = self.alma.put('/bibs/{}'.format(self.mms_id),
data=post_data,
headers={'Content-Type': 'application/xml'})
self.init(response)
def dump(self, filename):
# Dump record to file
with open(filename, 'wb') as f:
f.write(etree.tostring(self.doc, pretty_print=True))
| # coding=utf-8
from __future__ import unicode_literals
from io import BytesIO
from .marc import Record
from .util import etree, parse_xml, show_diff
class Bib(object):
""" An Alma Bib record """
def __init__(self, alma, xml):
self.alma = alma
self.orig_xml = xml.encode('utf-8')
self.init(xml)
def init(self, xml):
self.doc = parse_xml(xml)
self.mms_id = self.doc.findtext('mms_id')
self.marc_record = Record(self.doc.find('record'))
self.linked_to_cz = self.doc.findtext('linked_record_id[@type="CZ"]') or None
def save(self, diff=False):
# Save record back to Alma
post_data = ('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'.encode('utf-8') +
etree.tostring(self.doc, encoding='UTF-8'))
if diff:
show_diff(self.orig_xml, post_data)
response = self.alma.put('/bibs/{}'.format(self.mms_id),
data=BytesIO(post_data),
headers={'Content-Type': 'application/xml'})
self.init(response)
def dump(self, filename):
# Dump record to file
with open(filename, 'wb') as f:
f.write(etree.tostring(self.doc, pretty_print=True))
| Add xml header and post data as stream-like object just to be sure | Add xml header and post data as stream-like object just to be sure
| Python | agpl-3.0 | scriptotek/almar,scriptotek/lokar | ---
+++
@@ -2,6 +2,7 @@
from __future__ import unicode_literals
+from io import BytesIO
from .marc import Record
from .util import etree, parse_xml, show_diff
@@ -24,13 +25,14 @@
def save(self, diff=False):
# Save record back to Alma
- post_data = etree.tostring(self.doc, encoding='UTF-8')
+ post_data = ('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'.encode('utf-8') +
+ etree.tostring(self.doc, encoding='UTF-8'))
if diff:
show_diff(self.orig_xml, post_data)
response = self.alma.put('/bibs/{}'.format(self.mms_id),
- data=post_data,
+ data=BytesIO(post_data),
headers={'Content-Type': 'application/xml'})
self.init(response) |
e9f2a3c29185466f1c92121e9f4e4b727fb20fd0 | scripts/rename_tutorial_src_files.py | scripts/rename_tutorial_src_files.py | #%%
from pathlib import Path, PurePath
from string import digits
directory = Path("./docs/tutorial/src")
dirs = sorted([Path(f) for f in directory.iterdir()])
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
new_name = output_directory / f"tutorial{index}.py"
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%%
| #%%
from pathlib import Path, PurePath
from string import digits
directory = Path("./docs/tutorial/src")
skip_names = {"bigger_applications"}
skip_dirs = {directory / name for name in skip_names}
dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs])
d: PurePath
sufix = "__out__"
for d in dirs:
if d.name.endswith(sufix):
continue
output_dir_name = d.name + "__out__"
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
f: PurePath
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
if f.name != "__init__.py" and f.name.endswith(".py"):
new_name = output_directory / f"tutorial{index}.py"
else:
new_name = output_directory / f.name
print(new_name)
f.rename(new_name)
for d in dirs:
current_dir = Path(str(d) + sufix)
print(current_dir)
current_dir.rename(d)
#%%
| Update tutorial renamer to exclude files | :sparkles: Update tutorial renamer to exclude files
| Python | mit | tiangolo/fastapi,tiangolo/fastapi,tiangolo/fastapi | ---
+++
@@ -3,7 +3,9 @@
from string import digits
directory = Path("./docs/tutorial/src")
-dirs = sorted([Path(f) for f in directory.iterdir()])
+skip_names = {"bigger_applications"}
+skip_dirs = {directory / name for name in skip_names}
+dirs = sorted([Path(f) for f in directory.iterdir() if f not in skip_dirs])
d: PurePath
sufix = "__out__"
for d in dirs:
@@ -13,9 +15,13 @@
output_directory = directory / output_dir_name
output_directory.mkdir(exist_ok=True)
files = sorted([Path(f) for f in d.iterdir()])
+ f: PurePath
for i, f in enumerate(files):
index = str(i + 1).zfill(3)
- new_name = output_directory / f"tutorial{index}.py"
+ if f.name != "__init__.py" and f.name.endswith(".py"):
+ new_name = output_directory / f"tutorial{index}.py"
+ else:
+ new_name = output_directory / f.name
print(new_name)
f.rename(new_name)
|
1f745b3dbfbeb5c328d8ef69de0da0ff3618eb35 | purchase_order_line_invoicing/tests/__init__.py | purchase_order_line_invoicing/tests/__init__.py | # -*- coding: utf-8 -*-
from . import test_purchase_order_line_invoice_wizard
| # -*- coding: utf-8 -*-
from . import test_purchase_order_line_invoicing_wizard
| Fix wrong import after module renaming | Fix wrong import after module renaming
| Python | agpl-3.0 | OCA/purchase-workflow,OCA/purchase-workflow | ---
+++
@@ -1,2 +1,2 @@
# -*- coding: utf-8 -*-
-from . import test_purchase_order_line_invoice_wizard
+from . import test_purchase_order_line_invoicing_wizard |
0998953838a36cec14ab356d13e84732fb02167a | examples/tf/demo.py | examples/tf/demo.py | # python3
# Copyright 2020 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Demo for TF AI Pipeline."""
from ai_pipeline.models import TFModel
def main():
config = "examples/tf/config.yaml"
model = TFModel(config)
job_id = model.train()
model.serve(job_id=job_id)
if __name__ == "__main__":
main()
| # python3
# Copyright 2020 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Demo for TF AI Pipeline."""
from ai_pipeline.models import TFModel
def main():
config = "examples/tf/config.yaml"
pred_input = [{"age": 25,
"workclass": " Private",
"education": " 11th",
"education_num": 7,
"marital_status":" Never-married",
"occupation": " Machine-op-inspct",
"relationship": " Own-child",
"race": " Black",
"gender": " Male",
"capital_gain": 0,
"capital_loss": 0,
"hours_per_week": 40,
"native_country": " United-States"}]
model = TFModel(config)
job_id = model.train()
version = model.serve(job_id=job_id)
preds = model.online_predict(pred_input, version=version)
print("Features: {}".format(pred_input))
print("Predictions: {}".format(preds))
if __name__ == "__main__":
main()
| Add online prediction example for TF | Add online prediction example for TF
Change-Id: I508aaca04576b3bda500fae3350351ef7b251747
| Python | apache-2.0 | GoogleCloudPlatform/ml-pipeline-generator-python,GoogleCloudPlatform/ml-pipeline-generator-python | ---
+++
@@ -18,10 +18,27 @@
def main():
config = "examples/tf/config.yaml"
+ pred_input = [{"age": 25,
+ "workclass": " Private",
+ "education": " 11th",
+ "education_num": 7,
+ "marital_status":" Never-married",
+ "occupation": " Machine-op-inspct",
+ "relationship": " Own-child",
+ "race": " Black",
+ "gender": " Male",
+ "capital_gain": 0,
+ "capital_loss": 0,
+ "hours_per_week": 40,
+ "native_country": " United-States"}]
+
model = TFModel(config)
+ job_id = model.train()
+ version = model.serve(job_id=job_id)
+ preds = model.online_predict(pred_input, version=version)
- job_id = model.train()
- model.serve(job_id=job_id)
+ print("Features: {}".format(pred_input))
+ print("Predictions: {}".format(preds))
if __name__ == "__main__": |
4563ad431102bd578582dfd6af41fe68ac7c6c26 | examples/basic.py | examples/basic.py | import time
from simpleflow import (
activity,
Workflow,
futures,
)
@activity.with_attributes(task_list='quickstart', version='example')
def increment(x):
return x + 1
@activity.with_attributes(task_list='quickstart', version='example')
def double(x):
return x * 2
# A simpleflow activity can be any callable, so a function works, but a class
# will also work given the processing happens in __init__()
@activity.with_attributes(task_list='quickstart', version='example')
class Delay(object):
def __init__(self, t, x):
time.sleep(t)
return x
class BasicWorkflow(Workflow):
name = 'basic'
version = 'example'
task_list = 'example'
def run(self, x, t=30):
y = self.submit(increment, x)
yy = self.submit(Delay, t, y)
z = self.submit(double, y)
print '({x} + 1) * 2 = {result}'.format(
x=x,
result=z.result)
futures.wait(yy, z)
return z.result
| import time
from simpleflow import (
activity,
Workflow,
futures,
)
@activity.with_attributes(task_list='quickstart', version='example')
def increment(x):
return x + 1
@activity.with_attributes(task_list='quickstart', version='example')
def double(x):
return x * 2
@activity.with_attributes(task_list='quickstart', version='example')
def delay(t, x):
time.sleep(t)
return x
class BasicWorkflow(Workflow):
name = 'basic'
version = 'example'
task_list = 'example'
def run(self, x, t=30):
y = self.submit(increment, x)
yy = self.submit(delay, t, y)
z = self.submit(double, y)
print '({x} + 1) * 2 = {result}'.format(
x=x,
result=z.result)
futures.wait(yy, z)
return z.result
| Revert "Update example workflow to show you can use classes" | Revert "Update example workflow to show you can use classes"
This reverts commit dbce79102efa8fee233af95939f1ff0b9d060b00.
| Python | mit | botify-labs/simpleflow,botify-labs/simpleflow | ---
+++
@@ -16,13 +16,11 @@
def double(x):
return x * 2
-# A simpleflow activity can be any callable, so a function works, but a class
-# will also work given the processing happens in __init__()
+
@activity.with_attributes(task_list='quickstart', version='example')
-class Delay(object):
- def __init__(self, t, x):
- time.sleep(t)
- return x
+def delay(t, x):
+ time.sleep(t)
+ return x
class BasicWorkflow(Workflow):
@@ -32,7 +30,7 @@
def run(self, x, t=30):
y = self.submit(increment, x)
- yy = self.submit(Delay, t, y)
+ yy = self.submit(delay, t, y)
z = self.submit(double, y)
print '({x} + 1) * 2 = {result}'.format( |
bd0a572faf851ee01177c44fc2fe64770ab4f38a | app/main/views/index.py | app/main/views/index.py | import markdown
import os
from flask import render_template, url_for, redirect, Markup
from app.main import main
from flask_login import login_required
from flask.ext.login import current_user
from mdx_gfm import GithubFlavoredMarkdownExtension
@main.route('/')
def index():
if current_user and current_user.is_authenticated():
return redirect(url_for('main.choose_service'))
return render_template('views/signedout.html')
@main.route("/verify-mobile")
@login_required
def verify_mobile():
return render_template('views/verify-mobile.html')
@main.route('/cookies')
def cookies():
return render_template('views/cookies.html')
@main.route('/trial-mode')
def trial_mode():
return render_template('views/trial-mode.html')
@main.route('/pricing')
def pricing():
return render_template('views/pricing.html')
@main.route('/terms')
def terms():
return render_template('views/terms-of-use.html')
@main.route('/documentation')
def documentation():
curr_dir = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(curr_dir, '../../../docs/index.md')) as source:
return render_template(
'views/documentation.html',
body=Markup(markdown.markdown(
source.read(),
extensions=[GithubFlavoredMarkdownExtension()]
))
)
| import markdown
import os
from flask import render_template, url_for, redirect, Markup
from app.main import main
from flask_login import login_required
from flask.ext.login import current_user
from mdx_gfm import GithubFlavoredMarkdownExtension
@main.route('/')
def index():
if current_user and current_user.is_authenticated():
return redirect(url_for('main.choose_service'))
return render_template('views/signedout.html')
@main.route("/verify-mobile")
@login_required
def verify_mobile():
return render_template('views/verify-mobile.html')
@main.route('/cookies')
def cookies():
return render_template('views/cookies.html')
@main.route('/trial-mode')
def trial_mode():
return render_template('views/trial-mode.html')
@main.route('/pricing')
def pricing():
return render_template('views/pricing.html')
@main.route('/terms')
def terms():
return render_template('views/terms-of-use.html')
@main.route('/documentation')
def documentation():
curr_dir = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(curr_dir, '../../../docs/index.md'), encoding='utf-8') as source:
return render_template(
'views/documentation.html',
body=Markup(markdown.markdown(
source.read(),
extensions=[GithubFlavoredMarkdownExtension()]
))
)
| Add encoding to the documentation file. | Add encoding to the documentation file.
| Python | mit | alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin | ---
+++
@@ -44,7 +44,7 @@
@main.route('/documentation')
def documentation():
curr_dir = os.path.dirname(os.path.realpath(__file__))
- with open(os.path.join(curr_dir, '../../../docs/index.md')) as source:
+ with open(os.path.join(curr_dir, '../../../docs/index.md'), encoding='utf-8') as source:
return render_template(
'views/documentation.html',
body=Markup(markdown.markdown( |
b235ae762adb76fe9835d98f7e2a4fc3d92db251 | src/util/sortLargeFIs.py | src/util/sortLargeFIs.py | import os, sys
from operator import itemgetter
def errorExit(msg):
sys.stderr.write(msg)
sys.exit(1)
def main():
# Verify arguments
if len(sys.argv) != 2:
errorExit("Usage: {} FILE\n".format(os.path.basename(sys.argv[0])))
fileName = sys.argv[1]
if not os.path.isfile(fileName):
errorExit("{} does not exist, or is not a file\n".format(fileName))
results = []
with open(fileName) as FILE:
for line in FILE:
tokens = line.split("\t")
frequency = float(tokens[1])
results.append((line, frequency))
results.sort(key=itemgetter(1), reverse=True)
for tup in results:
sys.stdout.write(tup[0])
if __name__ == "__main__":
main()
| import os, sys
from operator import itemgetter
def errorExit(msg):
sys.stderr.write(msg)
sys.exit(1)
def main():
# Verify arguments
if len(sys.argv) != 2:
errorExit("Usage: {} FILE\n".format(os.path.basename(sys.argv[0])))
fileName = sys.argv[1]
if not os.path.isfile(fileName):
errorExit("{} does not exist, or is not a file\n".format(fileName))
results = []
with open(fileName) as FILE:
for line in FILE:
tokens = line.split("}")
itemset = tokens[0][1:-1]
frequency = float((tokens[1].split(" "))[0][2:-3])
results.append((itemset + "\t" + str(frequency)+"\n", frequency))
results.sort(key=itemgetter(1), reverse=True)
for tup in results:
sys.stdout.write(tup[0])
if __name__ == "__main__":
main()
| Modify to handle ARtool output | Modify to handle ARtool output
| Python | apache-2.0 | jdebrabant/parallel_arules,jdebrabant/parallel_arules,jdebrabant/parallel_arules,jdebrabant/parallel_arules | ---
+++
@@ -16,9 +16,10 @@
results = []
with open(fileName) as FILE:
for line in FILE:
- tokens = line.split("\t")
- frequency = float(tokens[1])
- results.append((line, frequency))
+ tokens = line.split("}")
+ itemset = tokens[0][1:-1]
+ frequency = float((tokens[1].split(" "))[0][2:-3])
+ results.append((itemset + "\t" + str(frequency)+"\n", frequency))
results.sort(key=itemgetter(1), reverse=True)
|
f22fa6d0c1b7e3bde95554f87af7254c2c381c41 | django_app_lti/urls.py | django_app_lti/urls.py | from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'),
path('config', LTIToolConfigView.as_view(), name='config'),
path('logout', logout_view, name="logout"),
path('logged-out', logged_out_view, name="logged-out"),
]
| from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
app_name = 'lti'
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'),
path('config', LTIToolConfigView.as_view(), name='config'),
path('logout', logout_view, name="logout"),
path('logged-out', logged_out_view, name="logged-out"),
]
| Add app_name to url module | Add app_name to url module
| Python | bsd-3-clause | Harvard-ATG/django-app-lti | ---
+++
@@ -1,6 +1,7 @@
from django.urls import path
from .views import LTILaunchView, LTIToolConfigView, logout_view, logged_out_view
+app_name = 'lti'
urlpatterns = [
path('', LTILaunchView.as_view(), name='index'),
path('launch', LTILaunchView.as_view(), name='launch'), |
1f8cc2ffe1f4c9b390a5dc19a2bd9eb4601f0055 | ledger/migrations/0002_auto_20170717_2255.py | ledger/migrations/0002_auto_20170717_2255.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 03:55
from __future__ import unicode_literals
from django.db import migrations, connection
def load_data(apps, schema_editor):
Account = apps.get_model("ledger", "Account")
Account(name="Cash", type="asset").save()
Account(name="Bank", type="asset").save()
Account(name="Fees", type="revenue").save()
Account(name="Deposits", type="revenue").save()
Account(name="Administrative", type="expense").save()
Account(name="Purchases", type="expense").save()
def remove_data(apps, schema_editor):
with connection.cursor() as cursor:
cursor.execute('DELETE FROM ledger_entry')
cursor.execute('DELETE FROM ledger_account')
class Migration(migrations.Migration):
dependencies = [
('ledger', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-18 03:55
from __future__ import unicode_literals
from django.db import migrations, connection
def load_data(apps, schema_editor):
Account = apps.get_model("ledger", "Account")
Account(name="Cash", type="asset").save()
Account(name="Bank", type="asset").save()
Account(name="Fees", type="revenue").save()
Account(name="Deposits", type="revenue").save()
Account(name="Administrative", type="expense").save()
Account(name="Purchases", type="expense").save()
Account(name="Balance", type="equity").save()
def remove_data(apps, schema_editor):
with connection.cursor() as cursor:
cursor.execute('DELETE FROM ledger_entry')
cursor.execute('DELETE FROM ledger_account')
class Migration(migrations.Migration):
dependencies = [
('ledger', '0001_initial'),
]
operations = [
migrations.RunPython(load_data, remove_data)
]
| Add a balance (equity) account | Add a balance (equity) account
| Python | mpl-2.0 | jackbravo/condorest-django,jackbravo/condorest-django,jackbravo/condorest-django | ---
+++
@@ -14,6 +14,7 @@
Account(name="Deposits", type="revenue").save()
Account(name="Administrative", type="expense").save()
Account(name="Purchases", type="expense").save()
+ Account(name="Balance", type="equity").save()
def remove_data(apps, schema_editor): |
6908fb4f5796e0b2f44ce93f54227f3873bb9a9b | masters/master.client.dart.packages/packages.py | masters/master.client.dart.packages/packages.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
PACKAGES = [
{
'name' : 'core-elements',
'package_dependencies' : [],
},
{
'name' : 'paper-elements',
'package_dependencies' : ['core-elements'],
},
{
'name' : 'dart-protobuf',
'package_dependencies' : [],
},
{
'name' : 'gcloud',
'package_dependencies' : [],
},
]
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
PACKAGES = [
{
'name' : 'core-elements',
'package_dependencies' : [],
},
{
'name' : 'paper-elements',
'package_dependencies' : ['core-elements'],
},
{
'name' : 'dart-protobuf',
'package_dependencies' : [],
},
{
'name' : 'gcloud',
'package_dependencies' : [],
},
{
'name' : 'googleapis_auth',
'package_dependencies' : [],
},
]
| Add googleapis_auth to dart package waterfall | Add googleapis_auth to dart package waterfall
Review URL: https://codereview.chromium.org/574283003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291992 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | ---
+++
@@ -19,4 +19,8 @@
'name' : 'gcloud',
'package_dependencies' : [],
},
+ {
+ 'name' : 'googleapis_auth',
+ 'package_dependencies' : [],
+ },
] |
924bee7b0a8b11aa0f1506584966533924d29478 | django_hash_filter/templatetags/hash_filter.py | django_hash_filter/templatetags/hash_filter.py | from django import template
from django.template.defaultfilters import stringfilter
from django.template.base import TemplateSyntaxError
import hashlib
from django_hash_filter.templatetags import get_available_hashes
register = template.Library()
@register.filter
@stringfilter
def hash(value, arg):
"""
Returns a hex-digest of the passed in value for the hash algorithm given.
"""
arg = str(arg).lower()
if not arg in get_available_hashes():
raise TemplateSyntaxError("The %s hash algorithm does not exist." % arg)
try:
f = getattr(hashlib, arg)
hashed = f(value).hexdigest()
except Exception:
raise ValueError("The %s hash algorithm cannot produce a hex digest. Ensure that OpenSSL is properly installed." % arg)
return hashed | from django import template
from django.template.defaultfilters import stringfilter
from django.template.base import TemplateSyntaxError
import hashlib
from django_hash_filter.templatetags import get_available_hashes
register = template.Library()
@register.filter
@stringfilter
def hash(value, arg):
"""
Returns a hex-digest of the passed in value for the hash algorithm given.
"""
arg = str(arg).lower()
if not arg in get_available_hashes():
raise TemplateSyntaxError("The %s hash algorithm does not exist. Supported algorithms are: %" % (arg, get_available_hashes()))
try:
f = getattr(hashlib, arg)
hashed = f(value).hexdigest()
except Exception:
raise ValueError("The %s hash algorithm cannot produce a hex digest. Ensure that OpenSSL is properly installed." % arg)
return hashed
| Add helpful text to template error | Add helpful text to template error | Python | mit | andrewjsledge/django-hash-filter | ---
+++
@@ -14,7 +14,7 @@
"""
arg = str(arg).lower()
if not arg in get_available_hashes():
- raise TemplateSyntaxError("The %s hash algorithm does not exist." % arg)
+ raise TemplateSyntaxError("The %s hash algorithm does not exist. Supported algorithms are: %" % (arg, get_available_hashes()))
try:
f = getattr(hashlib, arg)
hashed = f(value).hexdigest() |
aa10d2c0d49fd28afcda2b67f969fdb4a1d3072b | backend/breach/views.py | backend/breach/views.py | from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
def get_work(request):
return HttpResponse('Not implemented')
@csrf_exempt
def work_completed(request):
return HttpResponse('Not implemented')
| import json
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
def create_new_work():
return {'url': 'https://www.dimkarakostas.com/?breach-test',
'amount': 10,
'timeout': 0}
def get_work(request):
new_work = create_new_work()
return HttpResponse(json.dumps(new_work), content_type='application/json')
@csrf_exempt
def work_completed(request):
return HttpResponse('Not implemented')
| Change get_work to response with work JSON | Change get_work to response with work JSON
| Python | mit | dimkarakostas/rupture,esarafianou/rupture,dimkarakostas/rupture,dionyziz/rupture,dionyziz/rupture,esarafianou/rupture,dimriou/rupture,dionyziz/rupture,esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture | ---
+++
@@ -1,9 +1,17 @@
+import json
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
+def create_new_work():
+ return {'url': 'https://www.dimkarakostas.com/?breach-test',
+ 'amount': 10,
+ 'timeout': 0}
+
+
def get_work(request):
- return HttpResponse('Not implemented')
+ new_work = create_new_work()
+ return HttpResponse(json.dumps(new_work), content_type='application/json')
@csrf_exempt |
804edb8d7423ee882e483bec8ffe551a168602b4 | contentstore/models.py | contentstore/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
class Schedule(models.Model):
minute = models.CharField(_('minute'), max_length=64, default='*')
hour = models.CharField(_('hour'), max_length=64, default='*')
day_of_week = models.CharField(
_('day of week'), max_length=64, default='*',
)
day_of_month = models.CharField(
_('day of month'), max_length=64, default='*',
)
month_of_year = models.CharField(
_('month of year'), max_length=64, default='*',
)
class Meta:
verbose_name = _('schedule')
verbose_name_plural = _('schedules')
ordering = ['month_of_year', 'day_of_month',
'day_of_week', 'hour', 'minute']
def __unicode__(self):
rfield = lambda f: f and str(f).replace(' ', '') or '*'
return '{0} {1} {2} {3} {4} (m/h/d/dM/MY)'.format(
rfield(self.minute), rfield(self.hour), rfield(self.day_of_week),
rfield(self.day_of_month), rfield(self.month_of_year),
)
| from django.db import models
from django.utils.translation import ugettext_lazy as _
class Schedule(models.Model):
"""
Schdules (sometimes referred to as Protocols) are the method used to
define the rate and frequency at which the messages are sent to
the recipient
"""
minute = models.CharField(_('minute'), max_length=64, default='*')
hour = models.CharField(_('hour'), max_length=64, default='*')
day_of_week = models.CharField(
_('day of week'), max_length=64, default='*',
)
day_of_month = models.CharField(
_('day of month'), max_length=64, default='*',
)
month_of_year = models.CharField(
_('month of year'), max_length=64, default='*',
)
class Meta:
verbose_name = _('schedule')
verbose_name_plural = _('schedules')
ordering = ['month_of_year', 'day_of_month',
'day_of_week', 'hour', 'minute']
def __unicode__(self):
rfield = lambda f: f and str(f).replace(' ', '') or '*'
return '{0} {1} {2} {3} {4} (m/h/d/dM/MY)'.format(
rfield(self.minute), rfield(self.hour), rfield(self.day_of_week),
rfield(self.day_of_month), rfield(self.month_of_year),
)
| Add docstring to Schedule model | Add docstring to Schedule model
| Python | bsd-3-clause | praekelt/django-messaging-contentstore,praekelt/django-messaging-contentstore | ---
+++
@@ -3,6 +3,12 @@
class Schedule(models.Model):
+
+ """
+ Schdules (sometimes referred to as Protocols) are the method used to
+ define the rate and frequency at which the messages are sent to
+ the recipient
+ """
minute = models.CharField(_('minute'), max_length=64, default='*')
hour = models.CharField(_('hour'), max_length=64, default='*')
day_of_week = models.CharField( |
0f7853c3568791f0e93ece57d2fc750dbc93b963 | starlette/concurrency.py | starlette/concurrency.py | import asyncio
import functools
import typing
from typing import Any, AsyncGenerator, Iterator
try:
import contextvars # Python 3.7+ only.
except ImportError: # pragma: no cover
contextvars = None # type: ignore
async def run_in_threadpool(
func: typing.Callable, *args: typing.Any, **kwargs: typing.Any
) -> typing.Any:
loop = asyncio.get_event_loop()
if contextvars is not None: # pragma: no cover
# Ensure we run in the same context
child = functools.partial(func, *args, **kwargs)
context = contextvars.copy_context()
func = context.run
args = (child,)
elif kwargs: # pragma: no cover
# loop.run_in_executor doesn't accept 'kwargs', so bind them in here
func = functools.partial(func, **kwargs)
return await loop.run_in_executor(None, func, *args)
class _StopIteration(Exception):
pass
def _next(iterator: Iterator) -> Any:
# We can't raise `StopIteration` from within the threadpool iterator
# and catch it outside that context, so we coerce them into a different
# exception type.
try:
return next(iterator)
except StopIteration:
raise _StopIteration
async def iterate_in_threadpool(iterator: Iterator) -> AsyncGenerator:
while True:
try:
yield await run_in_threadpool(_next, iterator)
except _StopIteration:
break
| import asyncio
import functools
import typing
from typing import Any, AsyncGenerator, Iterator
try:
import contextvars # Python 3.7+ only.
except ImportError: # pragma: no cover
contextvars = None # type: ignore
T = typing.TypeVar("T")
async def run_in_threadpool(
func: typing.Callable[..., T], *args: typing.Any, **kwargs: typing.Any
) -> T:
loop = asyncio.get_event_loop()
if contextvars is not None: # pragma: no cover
# Ensure we run in the same context
child = functools.partial(func, *args, **kwargs)
context = contextvars.copy_context()
func = context.run
args = (child,)
elif kwargs: # pragma: no cover
# loop.run_in_executor doesn't accept 'kwargs', so bind them in here
func = functools.partial(func, **kwargs)
return await loop.run_in_executor(None, func, *args)
class _StopIteration(Exception):
pass
def _next(iterator: Iterator) -> Any:
# We can't raise `StopIteration` from within the threadpool iterator
# and catch it outside that context, so we coerce them into a different
# exception type.
try:
return next(iterator)
except StopIteration:
raise _StopIteration
async def iterate_in_threadpool(iterator: Iterator) -> AsyncGenerator:
while True:
try:
yield await run_in_threadpool(_next, iterator)
except _StopIteration:
break
| Add type hint for run_in_threadpool return type | Add type hint for run_in_threadpool return type
| Python | bsd-3-clause | encode/starlette,encode/starlette | ---
+++
@@ -8,10 +8,12 @@
except ImportError: # pragma: no cover
contextvars = None # type: ignore
+T = typing.TypeVar("T")
+
async def run_in_threadpool(
- func: typing.Callable, *args: typing.Any, **kwargs: typing.Any
-) -> typing.Any:
+ func: typing.Callable[..., T], *args: typing.Any, **kwargs: typing.Any
+) -> T:
loop = asyncio.get_event_loop()
if contextvars is not None: # pragma: no cover
# Ensure we run in the same context |
52c7efbe7f9a24f568768fb926f487a276a47f51 | numba/typesystem/exttypes/attributestype.py | numba/typesystem/exttypes/attributestype.py | # -*- coding: utf-8 -*-
"""
Extension attribute table type. Supports ordered (struct) fields, or
unordered (hash-based) fields.
"""
from numba.typesystem import *
from numba.typesystem.exttypes import ordering
#------------------------------------------------------------------------
# Extension Attributes Type
#------------------------------------------------------------------------
class ExtensionAttributesTableType(NumbaType):
"""
Type for extension type attributes.
"""
def __init__(self, parents):
# List of parent extension attribute table types
self.parents = parents
# attribute_name -> attribute_type
self.attributedict = {}
def create_attribute_ordering(self, orderer=ordering.unordered):
"""
Create a consistent attribute ordering with the base types.
ordering ∈ { unordered, extending, ... }
"""
self.attributes = orderer(ordering.AttributeTable(self))
def need_tp_dealloc(self):
"""
Returns whether this extension type needs a tp_dealloc, tp_traverse
and tp_clear filled out.
"""
if self.parent_type is not None and self.parent_type.need_tp_dealloc:
result = False
else:
field_types = self.attribute_struct.fielddict.itervalues()
result = any(map(is_obj, field_types))
self._need_tp_dealloc = result
return result
| # -*- coding: utf-8 -*-
"""
Extension attribute table type. Supports ordered (struct) fields, or
unordered (hash-based) fields.
"""
import numba
from numba.typesystem import NumbaType, is_obj
from numba.typesystem.exttypes import ordering
#------------------------------------------------------------------------
# Extension Attributes Type
#------------------------------------------------------------------------
class ExtensionAttributesTableType(NumbaType):
"""
Type for extension type attributes.
"""
def __init__(self, py_class, parents):
self.py_class = py_class
# List of parent extension attribute table types
self.parents = parents
# attribute_name -> attribute_type
self.attributedict = {}
# Ordered list of attribute names
self.attributes = None
def to_struct(self):
return numba.struct([(attr, self.attributedict[attr])
for attr in self.attributes])
def create_attribute_ordering(self, orderer=ordering.unordered):
"""
Create a consistent attribute ordering with the base types.
ordering ∈ { unordered, extending, ... }
"""
self.attributes = orderer(ordering.AttributeTable(self))
def need_tp_dealloc(self):
"""
Returns whether this extension type needs a tp_dealloc, tp_traverse
and tp_clear filled out.
"""
if self.parent_type is not None and self.parent_type.need_tp_dealloc:
result = False
else:
field_types = self.attribute_struct.fielddict.itervalues()
result = any(map(is_obj, field_types))
self._need_tp_dealloc = result
return result
| Add to_struct to attribute table | Add to_struct to attribute table
| Python | bsd-2-clause | gmarkall/numba,sklam/numba,GaZ3ll3/numba,stonebig/numba,IntelLabs/numba,IntelLabs/numba,GaZ3ll3/numba,cpcloud/numba,GaZ3ll3/numba,seibert/numba,GaZ3ll3/numba,gdementen/numba,gdementen/numba,sklam/numba,cpcloud/numba,stuartarchibald/numba,stuartarchibald/numba,shiquanwang/numba,stefanseefeld/numba,shiquanwang/numba,IntelLabs/numba,numba/numba,jriehl/numba,stuartarchibald/numba,pombredanne/numba,cpcloud/numba,seibert/numba,stefanseefeld/numba,stonebig/numba,gmarkall/numba,stefanseefeld/numba,ssarangi/numba,stuartarchibald/numba,sklam/numba,pitrou/numba,shiquanwang/numba,seibert/numba,pitrou/numba,gdementen/numba,gdementen/numba,IntelLabs/numba,stuartarchibald/numba,gmarkall/numba,stonebig/numba,ssarangi/numba,ssarangi/numba,jriehl/numba,pombredanne/numba,IntelLabs/numba,ssarangi/numba,seibert/numba,cpcloud/numba,pitrou/numba,sklam/numba,gdementen/numba,jriehl/numba,pombredanne/numba,pombredanne/numba,gmarkall/numba,gmarkall/numba,jriehl/numba,ssarangi/numba,pombredanne/numba,GaZ3ll3/numba,sklam/numba,stefanseefeld/numba,pitrou/numba,cpcloud/numba,numba/numba,stonebig/numba,numba/numba,seibert/numba,jriehl/numba,numba/numba,pitrou/numba,stefanseefeld/numba,stonebig/numba,numba/numba | ---
+++
@@ -5,7 +5,8 @@
unordered (hash-based) fields.
"""
-from numba.typesystem import *
+import numba
+from numba.typesystem import NumbaType, is_obj
from numba.typesystem.exttypes import ordering
#------------------------------------------------------------------------
@@ -17,12 +18,21 @@
Type for extension type attributes.
"""
- def __init__(self, parents):
+ def __init__(self, py_class, parents):
+ self.py_class = py_class
+
# List of parent extension attribute table types
self.parents = parents
# attribute_name -> attribute_type
self.attributedict = {}
+
+ # Ordered list of attribute names
+ self.attributes = None
+
+ def to_struct(self):
+ return numba.struct([(attr, self.attributedict[attr])
+ for attr in self.attributes])
def create_attribute_ordering(self, orderer=ordering.unordered):
""" |
3075a10c56fb38611134aa15c06b6da8cc777868 | enthought/pyface/tasks/task_window_layout.py | enthought/pyface/tasks/task_window_layout.py | # Enthought library imports.
from enthought.traits.api import Dict, HasStrictTraits, Instance, List, Str, \
Tuple
# Local imports.
from task_layout import TaskLayout
class TaskWindowLayout(HasStrictTraits):
""" A picklable object that describes the layout and state of a TaskWindow.
"""
# The ID of the active task. If unspecified, the first task will be active.
active_task = Str
# The IDs of all the tasks attached to the window.
tasks = List(Str)
# The position of the window.
position = Tuple(-1, -1)
# The size of the window.
size = Tuple(800, 600)
# A map from task IDs to their respective layouts. Set by the framework.
layout_state = Dict(Str, Instance(TaskLayout))
| # Enthought library imports.
from enthought.traits.api import Dict, HasStrictTraits, Instance, List, Str, \
Tuple
# Local imports.
from task_layout import TaskLayout
class TaskWindowLayout(HasStrictTraits):
""" A picklable object that describes the layout and state of a TaskWindow.
"""
# The ID of the active task. If unspecified, the first task will be active.
active_task = Str
# The IDs of all the tasks attached to the window.
tasks = List(Str)
# The position of the window.
position = Tuple(-1, -1)
# The size of the window.
size = Tuple(800, 600)
# A map from task IDs to their respective layouts. Set by the framework.
layout_state = Dict(Str, Instance(TaskLayout))
def get_active_task(self):
""" Returns the ID of the active task in the layout, or None if there is
no active task.
"""
if self.active_task:
return self.active_task
elif self.tasks:
return self.tasks[0]
return None
def is_equivalent_to(self, layout):
""" Returns whether two layouts are equivalent, i.e. whether they
contain the same tasks.
"""
return isinstance(layout, TaskWindowLayout) and \
self.get_active_task() == layout.get_active_task() and \
self.tasks == layout.tasks
| Add a few useful utility methods to TaskWindowLayout. | Add a few useful utility methods to TaskWindowLayout.
| Python | bsd-3-clause | brett-patterson/pyface,pankajp/pyface,geggo/pyface,geggo/pyface,enthought/traitsgui | ---
+++
@@ -24,3 +24,21 @@
# A map from task IDs to their respective layouts. Set by the framework.
layout_state = Dict(Str, Instance(TaskLayout))
+
+ def get_active_task(self):
+ """ Returns the ID of the active task in the layout, or None if there is
+ no active task.
+ """
+ if self.active_task:
+ return self.active_task
+ elif self.tasks:
+ return self.tasks[0]
+ return None
+
+ def is_equivalent_to(self, layout):
+ """ Returns whether two layouts are equivalent, i.e. whether they
+ contain the same tasks.
+ """
+ return isinstance(layout, TaskWindowLayout) and \
+ self.get_active_task() == layout.get_active_task() and \
+ self.tasks == layout.tasks |
c430aa52d22cb2aa97fa11b225047f5b52fa5326 | security.py | security.py | def file_is_allowed(file):
return file.endswith(".py") and "__init__" not in file.title() and "Sample_City" not in file.title()
| def file_is_allowed(file):
return file.endswith(".py") and "__Init__" not in file.title() and "Sample_City" not in file.title()
| Fix bug '__init__' showing in list of cities | Fix bug '__init__' showing in list of cities
| Python | mit | offenesdresden/ParkAPI,Mic92/ParkAPI,offenesdresden/ParkAPI,Mic92/ParkAPI | ---
+++
@@ -1,2 +1,2 @@
def file_is_allowed(file):
- return file.endswith(".py") and "__init__" not in file.title() and "Sample_City" not in file.title()
+ return file.endswith(".py") and "__Init__" not in file.title() and "Sample_City" not in file.title() |
663f44e94c22f8ac889a1d7608e6ab0e3cbf9ad3 | checkeol.py | checkeol.py | # Check files for incorrect newlines
import fnmatch, os
def check_file(fname):
for n, line in enumerate(open(fname, "rb")):
if "\r" in line:
print "%s@%d: CR found" % (fname, n)
return
def check_files(root, patterns):
for root, dirs, files in os.walk(root):
for f in files:
fname = os.path.join(root, f)
for p in patterns:
if fnmatch.fnmatch(fname, p):
check_file(fname)
break
if '.svn' in dirs:
dirs.remove('.svn')
check_files("coverage", ["*.py"])
check_files("test", ["*.py"])
check_file("setup.py")
| # Check files for incorrect newlines
import fnmatch, os
def check_file(fname):
for n, line in enumerate(open(fname, "rb")):
if "\r" in line:
print "%s@%d: CR found" % (fname, n)
return
def check_files(root, patterns):
for root, dirs, files in os.walk(root):
for f in files:
fname = os.path.join(root, f)
for p in patterns:
if fnmatch.fnmatch(fname, p):
check_file(fname)
break
if '.svn' in dirs:
dirs.remove('.svn')
check_files("coverage", ["*.py"])
check_files("test", ["*.py", "*,cover"])
check_file("setup.py")
| Check on the EOL chars in ,cover gold files. | Check on the EOL chars in ,cover gold files.
| Python | apache-2.0 | larsbutler/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,blueyed/coveragepy,hugovk/coveragepy,7WebPages/coveragepy,nedbat/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,larsbutler/coveragepy,hugovk/coveragepy,nedbat/coveragepy,jayhetee/coveragepy,hugovk/coveragepy,7WebPages/coveragepy,blueyed/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,7WebPages/coveragepy,hugovk/coveragepy,blueyed/coveragepy,nedbat/coveragepy,hugovk/coveragepy,7WebPages/coveragepy,nedbat/coveragepy | ---
+++
@@ -20,5 +20,5 @@
dirs.remove('.svn')
check_files("coverage", ["*.py"])
-check_files("test", ["*.py"])
+check_files("test", ["*.py", "*,cover"])
check_file("setup.py") |
251e13b96ed10e48b69ccf5d625d673a5507f222 | requests_kerberos/__init__.py | requests_kerberos/__init__.py | """
requests Kerberos/GSSAPI authentication library
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. This library
adds optional Kerberos/GSSAPI authentication support and supports mutual
authentication. Basic GET usage:
>>> import requests
>>> from requests_kerberos import HTTPKerberosAuth
>>> r = requests.get("http://example.org", auth=HTTPKerberosAuth())
The entire `requests.api` should be supported.
"""
import logging
import sys
from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
from .exceptions import MutualAuthenticationError
from .compat import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
__all__ = [HTTPKerberosAuth, MutualAuthenticationError, REQUIRED, OPTIONAL, DISABLED]
__version__ = '0.1'
| """
requests Kerberos/GSSAPI authentication library
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. This library
adds optional Kerberos/GSSAPI authentication support and supports mutual
authentication. Basic GET usage:
>>> import requests
>>> from requests_kerberos import HTTPKerberosAuth
>>> r = requests.get("http://example.org", auth=HTTPKerberosAuth())
The entire `requests.api` should be supported.
"""
import logging
import sys
from .kerberos_ import HTTPKerberosAuth, REQUIRED, OPTIONAL, DISABLED
from .exceptions import MutualAuthenticationError
from .compat import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
__all__ = [HTTPKerberosAuth, MutualAuthenticationError]
__version__ = '0.1'
| Remove REQUIRED, OPTIONAL, DISABLED from default exports | Remove REQUIRED, OPTIONAL, DISABLED from default exports
Prevent polluting the callers namespace with generically named constants.
| Python | isc | requests/requests-kerberos,AbsoluteMSTR/requests-kerberos,rbcarson/requests-kerberos,requests/requests-kerberos,danc86/requests-kerberos | ---
+++
@@ -21,5 +21,5 @@
logging.getLogger(__name__).addHandler(NullHandler())
-__all__ = [HTTPKerberosAuth, MutualAuthenticationError, REQUIRED, OPTIONAL, DISABLED]
+__all__ = [HTTPKerberosAuth, MutualAuthenticationError]
__version__ = '0.1' |
bcc3a4e4c8b3117deea4c7621653f65b588537f9 | keystone/common/policies/token_revocation.py | keystone/common/policies/token_revocation.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from keystone.common.policies import base
token_revocation_policies = [
policy.DocumentedRuleDefault(
name=base.IDENTITY % 'revocation_list',
check_str=base.RULE_SERVICE_OR_ADMIN,
description='List revoked PKI tokens.',
operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked',
'method': 'GET'}])
]
def list_rules():
return token_revocation_policies
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from keystone.common.policies import base
token_revocation_policies = [
policy.DocumentedRuleDefault(
name=base.IDENTITY % 'revocation_list',
check_str=base.RULE_SERVICE_OR_ADMIN,
# NOTE(lbragstad): Documenting scope_types here doesn't really make a
# difference since this API is going to return an empty list regardless
# of the token scope used in the API call. More-or-less just doing this
# for consistency with other policies.
scope_types=['system', 'project'],
description='List revoked PKI tokens.',
operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked',
'method': 'GET'}])
]
def list_rules():
return token_revocation_policies
| Add scope_types to token revocation policies | Add scope_types to token revocation policies
This doesn't seem useful since the API will return an empty list
regardless because PKI support has been removed.
More or less doing this for consistency.
Change-Id: Iaa2925119fa6c9e2324546ed44aa54bac51dba05
| Python | apache-2.0 | mahak/keystone,openstack/keystone,openstack/keystone,mahak/keystone,openstack/keystone,mahak/keystone | ---
+++
@@ -18,6 +18,11 @@
policy.DocumentedRuleDefault(
name=base.IDENTITY % 'revocation_list',
check_str=base.RULE_SERVICE_OR_ADMIN,
+ # NOTE(lbragstad): Documenting scope_types here doesn't really make a
+ # difference since this API is going to return an empty list regardless
+ # of the token scope used in the API call. More-or-less just doing this
+ # for consistency with other policies.
+ scope_types=['system', 'project'],
description='List revoked PKI tokens.',
operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked',
'method': 'GET'}]) |
c7ed2e94f10b680eef9942f2bda1d246f11595c5 | src/foremast/consts.py | src/foremast/consts.py | """Load base config and export package constants."""
import logging
from configparser import ConfigParser
from os.path import expanduser
LOG = logging.getLogger(__name__)
def find_config():
"""Look for config in config_locations. If not found, give a fatal error.
Returns:
ConfigParser: found configuration file
"""
config_locations = [
'./.foremast/foremast.cfg',
expanduser('~/.foremast/foremast.cfg'),
'/etc/foremast/foremast.cfg',
]
configurations = ConfigParser()
cfg_file = configurations.read(config_locations)
if not cfg_file:
LOG.error('No config found in the following locations: %s\n', config_locations)
return configurations
config = find_config()
API_URL = config['base']['gate_api_url']
GIT_URL = config['base']['git_url']
GITLAB_TOKEN = config['credentials']['gitlab_token']
SLACK_TOKEN = config['credentials']['slack_token']
DOMAIN = config['base']['domain']
ENVS = set(config['base']['envs'].split(','))
REGIONS = set(config['base']['regions'].split(','))
ASG_WHITELIST = set(config['whitelists']['asg_whitelist'].split(','))
HEADERS = {
'accept': '*/*',
'content-type': 'application/json',
'user-agent': 'foremast',
}
LOGGING_FORMAT = ('%(asctime)s [%(levelname)s] %(name)s:%(funcName)s:' '%(lineno)d - %(message)s')
| """Load base config and export package constants."""
import logging
from configparser import ConfigParser
from os.path import expanduser
LOG = logging.getLogger(__name__)
def find_config():
"""Look for **foremast.cfg** in config_locations.
If not found, give a fatal error.
Returns:
ConfigParser: found configuration file
"""
config_locations = [
'./.foremast/foremast.cfg',
expanduser('~/.foremast/foremast.cfg'),
'/etc/foremast/foremast.cfg',
]
configurations = ConfigParser()
cfg_file = configurations.read(config_locations)
if not cfg_file:
LOG.error('No config found in the following locations: %s\n', config_locations)
return configurations
config = find_config()
API_URL = config['base']['gate_api_url']
GIT_URL = config['base']['git_url']
GITLAB_TOKEN = config['credentials']['gitlab_token']
SLACK_TOKEN = config['credentials']['slack_token']
DOMAIN = config['base']['domain']
ENVS = set(config['base']['envs'].split(','))
REGIONS = set(config['base']['regions'].split(','))
ASG_WHITELIST = set(config['whitelists']['asg_whitelist'].split(','))
HEADERS = {
'accept': '*/*',
'content-type': 'application/json',
'user-agent': 'foremast',
}
LOGGING_FORMAT = ('%(asctime)s [%(levelname)s] %(name)s:%(funcName)s:' '%(lineno)d - %(message)s')
| Update docstring to include config file name | docs: Update docstring to include config file name
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | ---
+++
@@ -7,7 +7,9 @@
def find_config():
- """Look for config in config_locations. If not found, give a fatal error.
+ """Look for **foremast.cfg** in config_locations.
+
+ If not found, give a fatal error.
Returns:
ConfigParser: found configuration file |
1e6fcb134f55cb70ddd394a051a86c45aa50c944 | cli_helpers/tabular_output/tabulate_adapter.py | cli_helpers/tabular_output/tabulate_adapter.py | from cli_helpers.packages import tabulate
from .preprocessors import bytes_to_string, align_decimals
tabulate.PRESERVE_WHITESPACE = True
supported_markup_formats = ('mediawiki', 'html', 'latex', 'latex_booktabs',
'textile', 'moinmoin', 'jira')
supported_table_formats = ('plain', 'simple', 'grid', 'fancy_grid', 'pipe',
'orgtbl', 'psql', 'rst')
supported_formats = supported_markup_formats + supported_table_formats
preprocessors = (bytes_to_string, align_decimals)
def adapter(data, headers, table_format=None, missing_value='', **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
'disable_numparse': True}
if table_format in supported_markup_formats:
kwargs.update(numalign=None, stralign=None)
return tabulate.tabulate(data, headers, **kwargs)
| from cli_helpers.packages import tabulate
from .preprocessors import bytes_to_string, align_decimals
supported_markup_formats = ('mediawiki', 'html', 'latex', 'latex_booktabs',
'textile', 'moinmoin', 'jira')
supported_table_formats = ('plain', 'simple', 'grid', 'fancy_grid', 'pipe',
'orgtbl', 'psql', 'rst')
supported_formats = supported_markup_formats + supported_table_formats
preprocessors = (bytes_to_string, align_decimals)
def adapter(data, headers, table_format=None, missing_value='',
disable_numparse=True, preserve_whitespace=True, **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
'disable_numparse': disable_numparse}
if table_format in supported_markup_formats:
kwargs.update(numalign=None, stralign=None)
tabulate.PRESERVE_WHITESPACE = preserve_whitespace
return tabulate.tabulate(data, headers, **kwargs)
| Make whitespace and numparse configurable. | Make whitespace and numparse configurable.
| Python | bsd-3-clause | dbcli/cli_helpers,dbcli/cli_helpers | ---
+++
@@ -1,7 +1,5 @@
from cli_helpers.packages import tabulate
from .preprocessors import bytes_to_string, align_decimals
-
-tabulate.PRESERVE_WHITESPACE = True
supported_markup_formats = ('mediawiki', 'html', 'latex', 'latex_booktabs',
'textile', 'moinmoin', 'jira')
@@ -12,11 +10,14 @@
preprocessors = (bytes_to_string, align_decimals)
-def adapter(data, headers, table_format=None, missing_value='', **_):
+def adapter(data, headers, table_format=None, missing_value='',
+ disable_numparse=True, preserve_whitespace=True, **_):
"""Wrap tabulate inside a function for TabularOutputFormatter."""
kwargs = {'tablefmt': table_format, 'missingval': missing_value,
- 'disable_numparse': True}
+ 'disable_numparse': disable_numparse}
if table_format in supported_markup_formats:
kwargs.update(numalign=None, stralign=None)
+ tabulate.PRESERVE_WHITESPACE = preserve_whitespace
+
return tabulate.tabulate(data, headers, **kwargs) |
aa0ebe55ae5804f4f324a83de64e0879228261bd | securedrop/request_that_secures_file_uploads.py | securedrop/request_that_secures_file_uploads.py | from flask import wrappers
from tempfile import NamedTemporaryFile
from io import BytesIO
class RequestThatSecuresFileUploads(wrappers.Request):
def _secure_file_stream(self, total_content_length, content_type, filename=None,
content_length=None):
if total_content_length > 1024 * 512:
tf = NamedTemporaryFile(delete=False)
# Save the name of the temporary file on the request object so we can `shred` it later
self._temporary_file_name = tf.name
return tf
return BytesIO()
def make_form_data_parser(self):
return self.form_data_parser_class(self._secure_file_stream,
self.charset,
self.encoding_errors,
self.max_form_memory_size,
self.max_content_length,
self.parameter_storage_class)
| from io import BytesIO
from flask import wrappers
from secure_tempfile import SecureTemporaryFile
class RequestThatSecuresFileUploads(wrappers.Request):
def _secure_file_stream(self, total_content_length, content_type,
filename=None, content_length=None):
"""Storage class for data streamed in from requests.
If the data is relatively small (512KB), just store it in
memory. Otherwise, use the SecureTemporaryFile class to buffer
it on disk, encrypted with an ephemeral key to mitigate
forensic recovery of the plaintext.
"""
if total_content_length > 1024 * 512:
# We don't use `config.TEMP_DIR` here because that
# directory is exposed via X-Send-File and there is no
# reason for these files to be publicly accessible. See
# note in `config.py` for more info. Instead, we just use
# `/tmp`, which has the additional benefit of being
# automatically cleared on reboot.
return SecureTemporaryFile('/tmp')
return BytesIO()
def make_form_data_parser(self):
return self.form_data_parser_class(self._secure_file_stream,
self.charset,
self.encoding_errors,
self.max_form_memory_size,
self.max_content_length,
self.parameter_storage_class)
| Use SecureTemporaryFile for Source Interface requests | Use SecureTemporaryFile for Source Interface requests
| Python | agpl-3.0 | jeann2013/securedrop,chadmiller/securedrop,harlo/securedrop,micahflee/securedrop,jrosco/securedrop,GabeIsman/securedrop,chadmiller/securedrop,heartsucker/securedrop,heartsucker/securedrop,chadmiller/securedrop,pwplus/securedrop,micahflee/securedrop,jeann2013/securedrop,jaseg/securedrop,ehartsuyker/securedrop,jrosco/securedrop,ehartsuyker/securedrop,jaseg/securedrop,jeann2013/securedrop,harlo/securedrop,ageis/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,conorsch/securedrop,garrettr/securedrop,jeann2013/securedrop,pwplus/securedrop,conorsch/securedrop,conorsch/securedrop,pwplus/securedrop,GabeIsman/securedrop,jeann2013/securedrop,GabeIsman/securedrop,jaseg/securedrop,kelcecil/securedrop,garrettr/securedrop,jaseg/securedrop,chadmiller/securedrop,pwplus/securedrop,harlo/securedrop,kelcecil/securedrop,jrosco/securedrop,ageis/securedrop,harlo/securedrop,micahflee/securedrop,kelcecil/securedrop,kelcecil/securedrop,jaseg/securedrop,GabeIsman/securedrop,chadmiller/securedrop,ehartsuyker/securedrop,garrettr/securedrop,ehartsuyker/securedrop,harlo/securedrop,heartsucker/securedrop,micahflee/securedrop,jrosco/securedrop,heartsucker/securedrop,kelcecil/securedrop,jeann2013/securedrop,harlo/securedrop,GabeIsman/securedrop,jaseg/securedrop,ageis/securedrop,GabeIsman/securedrop,pwplus/securedrop,jrosco/securedrop,garrettr/securedrop,conorsch/securedrop,pwplus/securedrop,chadmiller/securedrop,jrosco/securedrop,conorsch/securedrop,ageis/securedrop,kelcecil/securedrop,ehartsuyker/securedrop | ---
+++
@@ -1,17 +1,31 @@
+from io import BytesIO
+
from flask import wrappers
-from tempfile import NamedTemporaryFile
-from io import BytesIO
+
+from secure_tempfile import SecureTemporaryFile
class RequestThatSecuresFileUploads(wrappers.Request):
- def _secure_file_stream(self, total_content_length, content_type, filename=None,
- content_length=None):
+ def _secure_file_stream(self, total_content_length, content_type,
+ filename=None, content_length=None):
+ """Storage class for data streamed in from requests.
+
+ If the data is relatively small (512KB), just store it in
+ memory. Otherwise, use the SecureTemporaryFile class to buffer
+ it on disk, encrypted with an ephemeral key to mitigate
+ forensic recovery of the plaintext.
+
+ """
if total_content_length > 1024 * 512:
- tf = NamedTemporaryFile(delete=False)
- # Save the name of the temporary file on the request object so we can `shred` it later
- self._temporary_file_name = tf.name
- return tf
+ # We don't use `config.TEMP_DIR` here because that
+ # directory is exposed via X-Send-File and there is no
+ # reason for these files to be publicly accessible. See
+ # note in `config.py` for more info. Instead, we just use
+ # `/tmp`, which has the additional benefit of being
+ # automatically cleared on reboot.
+ return SecureTemporaryFile('/tmp')
return BytesIO()
+
def make_form_data_parser(self):
return self.form_data_parser_class(self._secure_file_stream, |
9ba9e26888578e66469a63e412f46cf151fbcfd7 | common/data_refinery_common/test_microarray.py | common/data_refinery_common/test_microarray.py | from unittest.mock import Mock, patch
from django.test import TestCase
from data_refinery_common import microarray
CEL_FILE_HUMAN = "test-files/C30057.CEL"
CEL_FILE_RAT = "test-files/SG2_u34a.CEL"
CEL_FILE_MOUSE = "test-files/97_(Mouse430_2).CEL"
CEL_FILE_ZEBRAFISH = "test-files/CONTROL6.cel"
class MicroarrayTestCase(TestCase):
def test_get_platform_from_CEL(self):
self.assertEqual("hgu95av2", microarray.get_platform_from_CEL(CEL_FILE_HUMAN))
self.assertEqual("rgu34a", microarray.get_platform_from_CEL(CEL_FILE_RAT))
self.assertEqual("mouse4302", microarray.get_platform_from_CEL(CEL_FILE_MOUSE))
self.assertEqual("zebgene11st", microarray.get_platform_from_CEL(CEL_FILE_ZEBRAFISH))
| from unittest.mock import Mock, patch
from django.test import TestCase
from data_refinery_common import microarray
CEL_FILE_HUMAN = "test-files/C30057.CEL.gz"
CEL_FILE_RAT = "test-files/SG2_u34a.CEL.gz"
CEL_FILE_MOUSE = "test-files/97_(Mouse430_2).CEL.gz"
CEL_FILE_ZEBRAFISH = "test-files/CONTROL6.cel.gz"
class MicroarrayTestCase(TestCase):
def test_get_platform_from_CEL(self):
self.assertEqual("hgu95av2", microarray.get_platform_from_CEL(CEL_FILE_HUMAN))
self.assertEqual("rgu34a", microarray.get_platform_from_CEL(CEL_FILE_RAT))
self.assertEqual("mouse4302", microarray.get_platform_from_CEL(CEL_FILE_MOUSE))
self.assertEqual("zebgene11st", microarray.get_platform_from_CEL(CEL_FILE_ZEBRAFISH))
| Update test file paths for common to point to compressed versions. | Update test file paths for common to point to compressed versions.
| Python | bsd-3-clause | data-refinery/data_refinery,data-refinery/data_refinery,data-refinery/data_refinery | ---
+++
@@ -2,10 +2,10 @@
from django.test import TestCase
from data_refinery_common import microarray
-CEL_FILE_HUMAN = "test-files/C30057.CEL"
-CEL_FILE_RAT = "test-files/SG2_u34a.CEL"
-CEL_FILE_MOUSE = "test-files/97_(Mouse430_2).CEL"
-CEL_FILE_ZEBRAFISH = "test-files/CONTROL6.cel"
+CEL_FILE_HUMAN = "test-files/C30057.CEL.gz"
+CEL_FILE_RAT = "test-files/SG2_u34a.CEL.gz"
+CEL_FILE_MOUSE = "test-files/97_(Mouse430_2).CEL.gz"
+CEL_FILE_ZEBRAFISH = "test-files/CONTROL6.cel.gz"
class MicroarrayTestCase(TestCase): |
a0d79fac86d77de6c7ce9f76b269c18dc0972482 | domain_squeezer/urls.py | domain_squeezer/urls.py | """
URL path mapping for our Django app.
"""
from django.conf.urls import url
from . import views
from .settings import URL_MAX_WORDS
urlpatterns = [
url(r'^/*$', views.index, name='squeezer-index'),
]
# match every single element in URL path
for times in range(1, URL_MAX_WORDS + 1):
pattern = r'^/*' + (r'([^/]+)/+' * times) + r'$'
urlpatterns.append(
url(pattern, views.path, name='squeezer-path')
)
| """
URL path mapping for our Django app.
"""
from django.conf.urls import url
from . import views
from .settings import URL_MAX_WORDS
urlpatterns = [
url(r'^$', views.index, name='squeezer-index'),
]
# match every single element in URL path
for times in range(1, URL_MAX_WORDS + 1):
pattern = r'^' + (r'([^/]+)/+' * times) + r'$'
urlpatterns.append(
url(pattern, views.path, name='squeezer-path')
)
| Address Django warnings on url pattern regex | Address Django warnings on url pattern regex
| Python | mit | bittner/django-domain-squeezer,bittner/django-domain-squeezer | ---
+++
@@ -7,12 +7,12 @@
from .settings import URL_MAX_WORDS
urlpatterns = [
- url(r'^/*$', views.index, name='squeezer-index'),
+ url(r'^$', views.index, name='squeezer-index'),
]
# match every single element in URL path
for times in range(1, URL_MAX_WORDS + 1):
- pattern = r'^/*' + (r'([^/]+)/+' * times) + r'$'
+ pattern = r'^' + (r'([^/]+)/+' * times) + r'$'
urlpatterns.append(
url(pattern, views.path, name='squeezer-path')
) |
d330925b8e1f559bb16f75cca25ce583ccaa7f90 | rnacentral/rnacentral/urls.py | rnacentral/rnacentral/urls.py | """
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url, include
from django.views.generic import TemplateView
urlpatterns = patterns('',
# RNAcentral portal
url(r'', include('portal.urls')),
# REST API (use trailing slashes)
url(r'^api/current/', include('apiv1.urls')),
url(r'^api/v1/', include('apiv1.urls')),
# robots.txt
url(r'^robots\.txt$', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')),
# export metadata search results
url(r'^export/', include('export.urls')),
# sequence search
url(r'^sequence-search-new/', include('nhmmer.urls')),
)
| """
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, url, include
from django.views.generic import TemplateView
urlpatterns = patterns('',
# RNAcentral portal
url(r'', include('portal.urls')),
# REST API (use trailing slashes)
url(r'^api/current/', include('apiv1.urls')),
url(r'^api/v1/', include('apiv1.urls')),
# robots.txt
url(r'^robots\.txt$', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')),
# export metadata search results
url(r'^export/', include('export.urls')),
# sequence search
url(r'^sequence-search-nhmmer/', include('nhmmer.urls')),
)
| Rename new sequence search url | Rename new sequence search url
| Python | apache-2.0 | RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode | ---
+++
@@ -26,5 +26,5 @@
# export metadata search results
url(r'^export/', include('export.urls')),
# sequence search
- url(r'^sequence-search-new/', include('nhmmer.urls')),
+ url(r'^sequence-search-nhmmer/', include('nhmmer.urls')),
) |
42518bb29357194257b45989f9c64028f27ac804 | boundaryservice/urls.py | boundaryservice/urls.py | from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
| from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
| Allow shape output on /boundary/ queries | Allow shape output on /boundary/ queries
| Python | mit | opencorato/represent-boundaries,opencorato/represent-boundaries,datamade/represent-boundaries,opencorato/represent-boundaries,datamade/represent-boundaries,datamade/represent-boundaries | ---
+++
@@ -6,6 +6,7 @@
url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
+ url(r'^boundary/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'), |
95cfb8176432fcf289571026ebfb88626ad8b3fb | mopidy_scrobbler/__init__.py | mopidy_scrobbler/__init__.py | import os
from mopidy import config, ext
__version__ = "1.2.1"
class Extension(ext.Extension):
dist_name = "Mopidy-Scrobbler"
ext_name = "scrobbler"
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
return config.read(conf_file)
def get_config_schema(self):
schema = super().get_config_schema()
schema["username"] = config.String()
schema["password"] = config.Secret()
return schema
def setup(self, registry):
from .frontend import ScrobblerFrontend
registry.add("frontend", ScrobblerFrontend)
| import pathlib
from mopidy import config, ext
__version__ = "1.2.1"
class Extension(ext.Extension):
dist_name = "Mopidy-Scrobbler"
ext_name = "scrobbler"
version = __version__
def get_default_config(self):
return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema()
schema["username"] = config.String()
schema["password"] = config.Secret()
return schema
def setup(self, registry):
from .frontend import ScrobblerFrontend
registry.add("frontend", ScrobblerFrontend)
| Use pathlib to read ext.conf | Use pathlib to read ext.conf
| Python | apache-2.0 | mopidy/mopidy-scrobbler | ---
+++
@@ -1,4 +1,4 @@
-import os
+import pathlib
from mopidy import config, ext
@@ -12,8 +12,7 @@
version = __version__
def get_default_config(self):
- conf_file = os.path.join(os.path.dirname(__file__), "ext.conf")
- return config.read(conf_file)
+ return config.read(pathlib.Path(__file__).parent / "ext.conf")
def get_config_schema(self):
schema = super().get_config_schema() |
4aab1eb2d2d3a0c9b9c4ab6df23b043e6822ff84 | examples/delta/delta.py | examples/delta/delta.py | import sys
from SALib.analyze import delta
from SALib.util import read_param_file
import numpy as np
sys.path.append('../..')
# Read the parameter range file and generate samples
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
X = np.loadtxt('model_input.txt')
Y = np.loadtxt('model_output.txt')
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = delta.analyze(problem, X, Y, num_resamples=10, conf_level=0.95, print_to_console=False)
# Returns a dictionary with keys 'delta', 'delta_conf', 'S1', 'S1_conf'
print(str(Si['delta']))
| import sys
from SALib.analyze import delta
from SALib.util import read_param_file
import numpy as np
sys.path.append('../..')
# Read the parameter range file and generate samples
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
X = np.loadtxt('../data/model_input.txt')
Y = np.loadtxt('../data/model_output.txt')
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed)
Si = delta.analyze(problem, X, Y, num_resamples=10, conf_level=0.95, print_to_console=False)
# Returns a dictionary with keys 'delta', 'delta_conf', 'S1', 'S1_conf'
print(str(Si['delta']))
| Fix up example with corrected path | Fix up example with corrected path
| Python | mit | jdherman/SALib,SALib/SALib,jdherman/SALib | ---
+++
@@ -12,8 +12,8 @@
# Since this is "given data", the bounds in the parameter file will not be used
# but the columns are still expected
problem = read_param_file('../../src/SALib/test_functions/params/Ishigami.txt')
-X = np.loadtxt('model_input.txt')
-Y = np.loadtxt('model_output.txt')
+X = np.loadtxt('../data/model_input.txt')
+Y = np.loadtxt('../data/model_output.txt')
# Perform the sensitivity analysis using the model output
# Specify which column of the output file to analyze (zero-indexed) |
4e9de4dd4c408a056f72c833d89832a1981a7b0d | features/tags/forms.py | features/tags/forms.py | from django import forms
from django.db.models.functions import Lower
from . import models
class TagGroup(forms.ModelForm):
class Meta:
model = models.Tagged
fields = []
group = forms.ModelChoiceField(label='Gruppe', queryset=None)
def __init__(self, **kwargs):
tagger = kwargs.pop('tagger')
super().__init__(**kwargs)
self.fields['group'].queryset = tagger.groups.exclude(
tags__tag=self.instance.tag).order_by(Lower('name'))
def save(self, commit=True):
self.instance.tagged = self.cleaned_data['group']
return super().save(commit)
| from django import forms
from django.db.models.functions import Lower
from . import models
class TagGroup(forms.ModelForm):
class Meta:
model = models.Tagged
fields = []
group = forms.ModelChoiceField(label='Gruppe', queryset=None)
def __init__(self, **kwargs):
tagger = kwargs.pop('tagger')
super().__init__(**kwargs)
self.fields['group'].queryset = tagger.groups.exclude(
tags__tag=self.instance.tag).order_by(Lower('name'))
def save(self, commit=True):
if commit and not self.instance.tag.pk:
self.instance.tag.save()
self.instance.tag = self.instance.tag
self.instance.tagged = self.cleaned_data['group']
return super().save(commit)
| Fix save for empty tags | Fix save for empty tags
| Python | agpl-3.0 | stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten | ---
+++
@@ -18,5 +18,8 @@
tags__tag=self.instance.tag).order_by(Lower('name'))
def save(self, commit=True):
+ if commit and not self.instance.tag.pk:
+ self.instance.tag.save()
+ self.instance.tag = self.instance.tag
self.instance.tagged = self.cleaned_data['group']
return super().save(commit) |
8c00c71de736c54c22fedfae86101eb99846ba4f | anyjson.py | anyjson.py | """
Get the best JSON encoder/decoder available on this system.
"""
__version__ = "0.1"
__author__ = "Rune Halvorsen <runefh@gmail.com>"
__homepage__ = "http://bitbucket.org/runeh/anyjson/"
__docformat__ = "restructuredtext"
"""
.. function:: serialize(obj)
Serialize the object to JSON.
.. function:: deserialize(obj)
Deserialize JSON-encoded object to a Python object.
"""
# Try to import a module that provides json parsing and emitting, starting
# with the fastest alternative and falling back to the slower ones.
try:
# cjson is the fastest
import cjson
serialize = cjson.encode
deserialize = cjson.decode
except ImportError:
try:
# Then try to find simplejson. Later versions has C speedups which
# makes it pretty fast.
import simplejson
serialize = simplejson.dumps
deserialize = simplejson.loads
except ImportError:
try:
# Then try to find the python 2.6 stdlib json module.
import json
serialize = json.dumps
deserialize = json.loads
except ImportError:
# If all of the above fails, try to fallback to the simplejson
# embedded in Django.
from django.utils import simplejson
serialize = simplejson.dumps
deserialize = simplejson.loads
| """
Get the best JSON encoder/decoder available on this system.
"""
__version__ = "0.1"
__author__ = "Rune Halvorsen <runefh@gmail.com>"
__homepage__ = "http://bitbucket.org/runeh/anyjson/"
__docformat__ = "restructuredtext"
"""
.. function:: serialize(obj)
Serialize the object to JSON.
.. function:: deserialize(obj)
Deserialize JSON-encoded object to a Python object.
"""
# Try to import a module that provides json parsing and emitting, starting
# with the fastest alternative and falling back to the slower ones.
try:
# cjson is the fastest
import cjson
serialize = cjson.encode
deserialize = cjson.decode
except ImportError:
try:
# Then try to find simplejson. Later versions has C speedups which
# makes it pretty fast.
import simplejson
serialize = simplejson.dumps
deserialize = simplejson.loads
except ImportError:
try:
# Then try to find the python 2.6 stdlib json module.
import json
serialize = json.dumps
deserialize = json.loads
except ImportError:
try:
# If all of the above fails, try to fallback to the simplejson
# embedded in Django.
from django.utils import simplejson
serialize = simplejson.dumps
deserialize = simplejson.loads
except:
raise ImportError("No json module found")
| Raise our own ImportError if all fails. Looks better than to complain about django when that happens | Raise our own ImportError if all fails. Looks better than to complain about
django when that happens
| Python | bsd-3-clause | newvem/anyjson,kennethreitz-archive/anyjson | ---
+++
@@ -40,8 +40,12 @@
serialize = json.dumps
deserialize = json.loads
except ImportError:
- # If all of the above fails, try to fallback to the simplejson
- # embedded in Django.
- from django.utils import simplejson
- serialize = simplejson.dumps
- deserialize = simplejson.loads
+ try:
+ # If all of the above fails, try to fallback to the simplejson
+ # embedded in Django.
+ from django.utils import simplejson
+ serialize = simplejson.dumps
+ deserialize = simplejson.loads
+ except:
+ raise ImportError("No json module found")
+ |
4706d6feaff7057d04def0544e291900a754558e | nbgrader/apps/solutionapp.py | nbgrader/apps/solutionapp.py | from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class SolutionApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-solution')
description = Unicode(u'Prepare a solution version of an assignment')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.TableOfContents',
'nbgrader.preprocessors.RenderSolutions',
'nbgrader.preprocessors.ExtractTests',
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
self.config.merge(self.extra_config)
| from IPython.config.loader import Config
from IPython.config.application import catch_config_error
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
class SolutionApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-solution')
description = Unicode(u'Prepare a solution version of an assignment')
def _export_format_default(self):
return 'notebook'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.IncludeHeaderFooter',
'nbgrader.preprocessors.TableOfContents',
'nbgrader.preprocessors.RenderSolutions',
'nbgrader.preprocessors.ExtractTests',
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
self.extra_config.NbGraderApp.writer_class = 'IPython.nbconvert.writers.FilesWriter'
self.config.merge(self.extra_config)
| Add files writer to solution app | Add files writer to solution app
| Python | bsd-3-clause | ellisonbg/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,ellisonbg/nbgrader,modulexcite/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,dementrock/nbgrader,jdfreder/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,MatKallada/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,alope107/nbgrader,alope107/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jdfreder/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,EdwardJKim/nbgrader,MatKallada/nbgrader | ---
+++
@@ -21,4 +21,5 @@
'IPython.nbconvert.preprocessors.ExecutePreprocessor'
]
self.extra_config.RenderSolutions.solution = True
+ self.extra_config.NbGraderApp.writer_class = 'IPython.nbconvert.writers.FilesWriter'
self.config.merge(self.extra_config) |
20733c6b3d3bc249098297a73341f56e781aabbe | plugins/storage/storagetype/test/test_integration.py | plugins/storage/storagetype/test/test_integration.py | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
"""
Tests for each storage plug-in whether it properly implements the storage
interface.
"""
import luna.test_case
class TestIntegration(luna.test_case.TestCase):
pass #Not implemented yet. | #!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
"""
Tests for each storage plug-in whether it properly implements the storage
interface.
"""
import os.path #To get the plug-in directory.
import luna.plugins #To get the plug-ins to test with.
import luna.test_case
plugin_base = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "..") #The main directory containing this plug-in (as well as all others we're hoping to find).
luna.plugins.add_plugin_location(plugin_base)
luna.plugins.discover()
class TestIntegration(luna.test_case.TestCase):
"""
Tests for each storage plug-in whether it properly implements the storage
interface.
"""
@luna.test_case.parametrise(luna.plugins.plugins_by_type["storage"])
def test_can_read(self, storage, **other_metadata):
self.assertTrue(callable(storage["can_read"]), "The can_read function must be callable.") | Add simple test case to test plug-in discovery | Add simple test case to test plug-in discovery
This just tests if can_read is callable for now, but at least the plug-in discovery in tests works. It doesn't work beautifully, but we can work from here.
| Python | cc0-1.0 | Ghostkeeper/Luna | ---
+++
@@ -9,7 +9,20 @@
interface.
"""
+import os.path #To get the plug-in directory.
+import luna.plugins #To get the plug-ins to test with.
import luna.test_case
+plugin_base = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "..") #The main directory containing this plug-in (as well as all others we're hoping to find).
+luna.plugins.add_plugin_location(plugin_base)
+luna.plugins.discover()
+
class TestIntegration(luna.test_case.TestCase):
- pass #Not implemented yet.
+ """
+ Tests for each storage plug-in whether it properly implements the storage
+ interface.
+ """
+
+ @luna.test_case.parametrise(luna.plugins.plugins_by_type["storage"])
+ def test_can_read(self, storage, **other_metadata):
+ self.assertTrue(callable(storage["can_read"]), "The can_read function must be callable.") |
b4d97079b6a74e9a2001b50c66b9eee6bc57ba66 | swiftclient/__init__.py | swiftclient/__init__.py | # -*- encoding: utf-8 -*-
# Copyright (c) 2012 Rackspace
# flake8: noqa
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""""
OpenStack Swift Python client binding.
"""
from .client import *
# At setup.py time, we haven't installed anything yet, so there
# is nothing that is able to set this version property. Squelching
# that exception here should be fine- if there are problems with
# pkg_resources in a real install, that will manifest itself as
# an error still
try:
from swiftclient import version
__version__ = version.version_string
except Exception:
pass
| # -*- encoding: utf-8 -*-
# Copyright (c) 2012 Rackspace
# flake8: noqa
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
OpenStack Swift Python client binding.
"""
from .client import *
# At setup.py time, we haven't installed anything yet, so there
# is nothing that is able to set this version property. Squelching
# that exception here should be fine- if there are problems with
# pkg_resources in a real install, that will manifest itself as
# an error still
try:
from swiftclient import version
__version__ = version.version_string
except Exception:
pass
| Remove extra double quote from docstring | Remove extra double quote from docstring
The extra " was visible on
http://docs.openstack.org/developer/python-swiftclient/swiftclient.html
Change-Id: I7d61c8259a4f13464c11ae7e3fa28eb3a58e4baa
| Python | apache-2.0 | krnflake/python-hubicclient,varunarya10/python-swiftclient,jeseem/python-swiftclient,JioCloud/python-swiftclient,sohonetlabs/python-swiftclient,pratikmallya/python-swiftclient,iostackproject/IO-Bandwidth-Differentiation-Client,JioCloud/python-swiftclient,openstack/python-swiftclient,pratikmallya/python-swiftclient,varunarya10/python-swiftclient,sohonetlabs/python-swiftclient,ironsmile/python-swiftclient,VyacheslavHashov/python-swiftclient,iostackproject/IO-Bandwidth-Differentiation-Client,jeseem/python-swiftclient,VyacheslavHashov/python-swiftclient,openstack/python-swiftclient,ironsmile/python-swiftclient | ---
+++
@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-""""
+"""
OpenStack Swift Python client binding.
"""
from .client import * |
23a9aaae78cc4d9228f8d0705647fbcadcaf7975 | markymark/fields.py | markymark/fields.py | from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = MarkdownTextarea
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
| from django import forms
from django.db import models
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
kwargs['widget'] = kwargs.pop('widget', MarkdownTextarea)
super(MarkdownFormField, self).__init__(*args, **kwargs)
class MarkdownField(models.TextField):
def formfield(self, form_class=MarkdownFormField, **kwargs):
return super(MarkdownField, self).formfield(
form_class=form_class, **kwargs)
| Allow widget overwriting on form field | Allow widget overwriting on form field
| Python | mit | moccu/django-markymark,moccu/django-markymark,moccu/django-markymark | ---
+++
@@ -6,7 +6,7 @@
class MarkdownFormField(forms.fields.CharField):
def __init__(self, *args, **kwargs):
- kwargs['widget'] = MarkdownTextarea
+ kwargs['widget'] = kwargs.pop('widget', MarkdownTextarea)
super(MarkdownFormField, self).__init__(*args, **kwargs)
|
dbe28b1d00a17acdd276263c9042dbd7b5dfc311 | src/adhocracy_kit/adhocracy_kit/__init__.py | src/adhocracy_kit/adhocracy_kit/__init__.py | """Adhocracy extension."""
from pyramid.config import Configurator
from adhocracy_core import root_factory
def includeme(config):
"""Setup adhocracy extension.
The kit package should be exactly like the spd package but with different
root permissions and default translations for the emails.
"""
# copied from adhocracy_spd (without resources and translations)
config.include('adhocracy_core')
config.commit()
config.include('.sheets')
config.include('.workflows')
config.include('.evolution')
# add translations
config.add_translation_dirs('adhocracy_core:locale/')
# copoied from adhocracy_spd.resources resources
config.include('adhocracy-spd.resources.digital_leben')
# include kit resource types
config.include('.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
| """Adhocracy extension."""
from pyramid.config import Configurator
from adhocracy_core import root_factory
def includeme(config):
"""Setup adhocracy extension.
The kit package should be exactly like the spd package but with different
root permissions and default translations for the emails.
"""
# copied from adhocracy_spd (without resources and translations)
config.include('adhocracy_core')
config.commit()
config.include('adhocracy_spd.sheets')
config.include('adhocracy_spd.workflows')
config.include('adhocracy_spd.evolution')
# add translations
config.add_translation_dirs('adhocracy_core:locale/')
# copoied from adhocracy_spd.resources resources
config.include('adhocracy_spd.resources.digital_leben')
# include kit resource types
config.include('.resources')
def main(global_config, **settings):
""" Return a Pyramid WSGI application. """
config = Configurator(settings=settings, root_factory=root_factory)
includeme(config)
return config.make_wsgi_app()
| Fix wrong config includes in kit package | Fix wrong config includes in kit package
| Python | agpl-3.0 | liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,fhartwig/adhocracy3.mercator,liqd/adhocracy3.mercator,liqd/adhocracy3.mercator | ---
+++
@@ -13,15 +13,15 @@
# copied from adhocracy_spd (without resources and translations)
config.include('adhocracy_core')
config.commit()
- config.include('.sheets')
- config.include('.workflows')
- config.include('.evolution')
+ config.include('adhocracy_spd.sheets')
+ config.include('adhocracy_spd.workflows')
+ config.include('adhocracy_spd.evolution')
# add translations
config.add_translation_dirs('adhocracy_core:locale/')
# copoied from adhocracy_spd.resources resources
- config.include('adhocracy-spd.resources.digital_leben')
+ config.include('adhocracy_spd.resources.digital_leben')
# include kit resource types
config.include('.resources') |
2e299c5c2a35d3cd42be43c90af41c28e5d27c15 | reindexer/reindex_shard_generator/src/test_reindex_shard_config.py | reindexer/reindex_shard_generator/src/test_reindex_shard_config.py | # -*- encoding: utf-8
import pytest
from reindex_shard_config import create_reindex_shard
@pytest.mark.parametrize(
'source_name, source_id, expected_reindex_shard', [
('sierra', 'b0000001', 'sierra/2441'),
('miro', 'A0000001', 'miro/128')
])
def test_create_reindex_shard(source_name, source_id, expected_reindex_shard):
reindex_shard = create_reindex_shard(
source_name=source_name,
source_id=source_id
)
assert reindex_shard == expected_reindex_shard
@pytest.mark.parametrize('source_name', ['foo', 13, None])
def test_unrecognised_source_name_is_ValueError(source_name):
with pytest.raises(ValueError) as err:
reindex_shard = create_reindex_shard(
source_name=source_name,
source_id='0001'
)
assert err.value.args[0].startswith('Unrecognised source name')
assert repr(source_name) in err.value.args[0]
| # -*- encoding: utf-8
import pytest
from reindex_shard_config import create_reindex_shard
@pytest.mark.parametrize(
'source_name, source_id, expected_reindex_shard', [
('sierra', 'b0000001', 'sierra/2441'),
('miro', 'A0000001', 'miro/128')
])
def test_create_reindex_shard(source_name, source_id, expected_reindex_shard):
reindex_shard = create_reindex_shard(
source_name=source_name,
source_id=source_id
)
assert reindex_shard == expected_reindex_shard
@pytest.mark.parametrize('source_name', ['foo', 13, None])
def test_unrecognised_source_name_is_ValueError(source_name):
with pytest.raises(ValueError) as err:
create_reindex_shard(
source_name=source_name,
source_id='0001'
)
assert err.value.args[0].startswith('Unrecognised source name')
assert repr(source_name) in err.value.args[0]
| Fix a Python lint error | Fix a Python lint error
| Python | mit | wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api | ---
+++
@@ -21,7 +21,7 @@
@pytest.mark.parametrize('source_name', ['foo', 13, None])
def test_unrecognised_source_name_is_ValueError(source_name):
with pytest.raises(ValueError) as err:
- reindex_shard = create_reindex_shard(
+ create_reindex_shard(
source_name=source_name,
source_id='0001'
) |
eff924e07999bd0aaaa36373c658efb1ffefe5c7 | magpie/utils/solr.py | magpie/utils/solr.py | from mysolr import Solr
from magpie.settings import settings
_solr = None
def open_solr_connection(core_name):
global _solr
if not _solr:
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr | from mysolr import Solr
from magpie.settings import settings
_solr = None
def open_solr_connection(core_name):
global _solr
if not _solr:
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr
def escape_solr_query(query):
"""
Escape special chars for Solr queries.
"""
chars = ['+', '-', '&&', '||', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?',
':', '/', ' ']
for char in chars:
query = query.replace(char, '\{}'.format(char))
return query | Add method to escape special chars in Solr queries | Add method to escape special chars in Solr queries
| Python | apache-2.0 | nimiq/moogle-project | ---
+++
@@ -12,3 +12,15 @@
url = '{}/{}'.format(settings.SOLR_URL, core_name)
_solr = Solr(url)
return _solr
+
+
+def escape_solr_query(query):
+ """
+ Escape special chars for Solr queries.
+ """
+ chars = ['+', '-', '&&', '||', '!', '(', ')', '{', '}', '[', ']', '^', '"', '~', '*', '?',
+ ':', '/', ' ']
+ for char in chars:
+ query = query.replace(char, '\{}'.format(char))
+
+ return query |
8d831e4834b61c04b3f5f2d8a812095eea8c022f | personDb.py | personDb.py | import pickle
class PersonDb(object):
def __init__(self, dbName, autoload = True):
self.dbName = dbName
self.db = None
if autoload:
self.setup()
def setup(self):
self.db = PersonDb.load(self.dbName)
self.getGroups()
def getGroups(self):
tmp = set()
for person in self.db:
for group in person.group:
tmp.append(person)
self.groups = tmp
def save(data, fileName = 'database'):
filen = PersonDb.fileExtension(fileName)
with open(filen,"wb") as pickleOut:
pickle.dump(data, pickleOut)
def load(fileName = 'database'):
filen = PersonDb.fileExtension(fileName)
with open(filen, "rb") as pickleIn:
data = pickle.load(pickleIn)
return data
def fileExtension(fileName):
result = fileName.strip()
if '.json' not in result:
result += '.json'
return result
| import pickle
import os
class PersonDb(object):
def __init__(self, dbName, autoload = True):
self.dbName = dbName
self.db = None
if autoload:
self.setup()
def setup(self):
self.db = PersonDb.load(self.dbName)
self.getGroups()
def getGroups(self):
tmp = set()
for person in self.db:
for group in person.group:
tmp.append(person)
self.groups = tmp
def save(data, fileName = 'database'):
filen = PersonDb.fileExtension(fileName)
with open(filen,"wb") as pickleOut:
pickle.dump(data, pickleOut)
def load(fileName = 'database'):
filen = PersonDb.fileExtension(fileName)
if os.path.exists(filen):
with open(filen, "rb") as pickleIn:
data = pickle.load(pickleIn)
return data
else:
return []
def fileExtension(fileName):
result = fileName.strip()
if '.json' not in result:
result += '.json'
return result
| Add handling of not present file. | Add handling of not present file.
Signed-off-by: Matej Dujava <03ce64f61b3ea1fda633fb2a103b989e3272d16b@gmail.com>
| Python | mit | matejd11/birthdayNotify | ---
+++
@@ -1,4 +1,5 @@
import pickle
+import os
class PersonDb(object):
@@ -26,9 +27,12 @@
def load(fileName = 'database'):
filen = PersonDb.fileExtension(fileName)
- with open(filen, "rb") as pickleIn:
- data = pickle.load(pickleIn)
- return data
+ if os.path.exists(filen):
+ with open(filen, "rb") as pickleIn:
+ data = pickle.load(pickleIn)
+ return data
+ else:
+ return []
def fileExtension(fileName):
result = fileName.strip() |
77bfe0b92e9bc8ffa23f91a4a9b18093ae8f5f8a | ehriportal/portal/utils.py | ehriportal/portal/utils.py | """Utility functions for dealing with repository and geo data."""
from incf.countryutils import transformations
import babel
# Hacky dictionary of official country/languages names
# we want to substitute for friendlier versions...
# A more permenant solution is needed to this.
SUBNAMES = {
"United Kingdom of Great Britain & Northern Ireland": "United Kingdom",
}
def language_name_from_code(code, locale="en"):
"""Get lang display name."""
# TODO: Find the correct way to do this
return babel.Locale(locale).languages.get(code, "")
def get_country_from_code(code):
"""Get the country code from a coutry name."""
try:
name = transformations.cc_to_cn(code)
return SUBNAMES.get(name, name)
except KeyError:
pass
| """Utility functions for dealing with repository and geo data."""
import json
import datetime
from types import MethodType
from incf.countryutils import transformations
import babel
from haystack.query import SearchQuerySet
from django.core.paginator import Paginator, Page, InvalidPage, EmptyPage
from haystack.models import SearchResult
from haystack.query import SearchQuerySet
# Hacky dictionary of official country/languages names
# we want to substitute for friendlier versions...
# A more permenant solution is needed to this.
SUBNAMES = {
"United Kingdom of Great Britain & Northern Ireland": "United Kingdom",
}
def language_name_from_code(code, locale="en"):
"""Get lang display name."""
# TODO: Find the correct way to do this
return babel.Locale(locale).languages.get(code, "")
def get_country_from_code(code):
"""Get the country code from a coutry name."""
try:
name = transformations.cc_to_cn(code)
return SUBNAMES.get(name, name)
except KeyError:
pass
class HaystackPaginationEncoder(json.JSONEncoder):
"""JSON Encoder a Django pagination object."""
def default(self, obj):
# handle dates
if isinstance(obj, datetime.datetime):
return obj.strftime('%Y-%m-%dT%H:%M:%S')
# handle searchresult objects
elif isinstance(obj, SearchResult):
return dict([(f, getattr(obj, f)) for f in obj.get_stored_fields() \
if f != u'suggestions'])
# handle pagination objects
elif isinstance(obj, Page):
serializedpage = dict(
object_list=[self.default(r) for r in obj.object_list])
for attr in ("end_index", "has_next", "has_other_pages",
"has_previous", "next_page_number", "number",
"start_index", "previous_page_number", "object_list"):
v = getattr(obj, attr)
if isinstance(v, MethodType):
serializedpage[attr] = v()
elif isinstance(v, (str, int)):
serializedpage[attr] = v
return serializedpage
return json.JSONEncoder.default(self, obj)
| Add a class to JSON encode a page object containing Haystack search results. Note: this is a stopgap measure and quite inefficient, since Haystack turns the Solr data into objects and this turns it back into JSON, but at least it's not specific to a search engine backend. | Add a class to JSON encode a page object containing Haystack search results. Note: this is a stopgap measure and quite inefficient, since Haystack turns the Solr data into objects and this turns it back into JSON, but at least it's not specific to a search engine backend.
| Python | mit | mikesname/ehri-collections,mikesname/ehri-collections,mikesname/ehri-collections | ---
+++
@@ -1,7 +1,15 @@
"""Utility functions for dealing with repository and geo data."""
+import json
+import datetime
+from types import MethodType
from incf.countryutils import transformations
import babel
+
+from haystack.query import SearchQuerySet
+from django.core.paginator import Paginator, Page, InvalidPage, EmptyPage
+from haystack.models import SearchResult
+from haystack.query import SearchQuerySet
# Hacky dictionary of official country/languages names
# we want to substitute for friendlier versions...
@@ -25,3 +33,27 @@
pass
+class HaystackPaginationEncoder(json.JSONEncoder):
+ """JSON Encoder a Django pagination object."""
+ def default(self, obj):
+ # handle dates
+ if isinstance(obj, datetime.datetime):
+ return obj.strftime('%Y-%m-%dT%H:%M:%S')
+ # handle searchresult objects
+ elif isinstance(obj, SearchResult):
+ return dict([(f, getattr(obj, f)) for f in obj.get_stored_fields() \
+ if f != u'suggestions'])
+ # handle pagination objects
+ elif isinstance(obj, Page):
+ serializedpage = dict(
+ object_list=[self.default(r) for r in obj.object_list])
+ for attr in ("end_index", "has_next", "has_other_pages",
+ "has_previous", "next_page_number", "number",
+ "start_index", "previous_page_number", "object_list"):
+ v = getattr(obj, attr)
+ if isinstance(v, MethodType):
+ serializedpage[attr] = v()
+ elif isinstance(v, (str, int)):
+ serializedpage[attr] = v
+ return serializedpage
+ return json.JSONEncoder.default(self, obj) |
42901554db49cd1204054ea695cea6ee4e368b1e | tests/basics/int-long.py | tests/basics/int-long.py | # This tests long ints for 32-bit machine
a = 0x1ffffffff
b = 0x100000000
print(a)
print(b)
print(a + b)
print(a - b)
print(b - a)
# overflows long long implementation
#print(a * b)
print(a // b)
print(a % b)
print(a & b)
print(a | b)
print(a ^ b)
print(a << 3)
print(a >> 1)
a += b
print(a)
a -= 123456
print(a)
a *= 257
print(a)
a //= 257
print(a)
a %= b
print(a)
a ^= b
print(a)
a |= b
print(a)
a &= b
print(a)
a <<= 5
print(a)
a >>= 1
print(a)
| # This tests long ints for 32-bit machine
a = 0x1ffffffff
b = 0x100000000
print(a)
print(b)
print(a + b)
print(a - b)
print(b - a)
# overflows long long implementation
#print(a * b)
print(a // b)
print(a % b)
print(a & b)
print(a | b)
print(a ^ b)
print(a << 3)
print(a >> 1)
a += b
print(a)
a -= 123456
print(a)
a *= 257
print(a)
a //= 257
print(a)
a %= b
print(a)
a ^= b
print(a)
a |= b
print(a)
a &= b
print(a)
a <<= 5
print(a)
a >>= 1
print(a)
# Test referential integrity of long ints
a = 0x1ffffffff
b = a
a += 1
print(a)
print(b)
| Add regression test for improper inplace op implementation. | objint_longlong: Add regression test for improper inplace op implementation.
| Python | mit | jmarcelino/pycom-micropython,cwyark/micropython,puuu/micropython,rubencabrera/micropython,lbattraw/micropython,warner83/micropython,skybird6672/micropython,methoxid/micropystat,blmorris/micropython,alex-march/micropython,rubencabrera/micropython,dxxb/micropython,firstval/micropython,ahotam/micropython,TDAbboud/micropython,misterdanb/micropython,PappaPeppar/micropython,stonegithubs/micropython,kostyll/micropython,pozetroninc/micropython,slzatz/micropython,misterdanb/micropython,deshipu/micropython,toolmacher/micropython,xhat/micropython,puuu/micropython,firstval/micropython,danicampora/micropython,dxxb/micropython,tobbad/micropython,drrk/micropython,hosaka/micropython,micropython/micropython-esp32,lowRISC/micropython,ChuckM/micropython,noahwilliamsson/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,aethaniel/micropython,HenrikSolver/micropython,danicampora/micropython,puuu/micropython,lowRISC/micropython,kerneltask/micropython,infinnovation/micropython,aitjcize/micropython,lbattraw/micropython,bvernoux/micropython,vriera/micropython,Peetz0r/micropython-esp32,matthewelse/micropython,MrSurly/micropython,neilh10/micropython,SungEun-Steve-Kim/test-mp,noahchense/micropython,feilongfl/micropython,bvernoux/micropython,stonegithubs/micropython,slzatz/micropython,redbear/micropython,misterdanb/micropython,HenrikSolver/micropython,aethaniel/micropython,ChuckM/micropython,pfalcon/micropython,ceramos/micropython,ceramos/micropython,methoxid/micropystat,swegener/micropython,galenhz/micropython,ChuckM/micropython,omtinez/micropython,orionrobots/micropython,infinnovation/micropython,adafruit/micropython,EcmaXp/micropython,oopy/micropython,stonegithubs/micropython,utopiaprince/micropython,suda/micropython,omtinez/micropython,ceramos/micropython,jmarcelino/pycom-micropython,EcmaXp/micropython,cloudformdesign/micropython,praemdonck/micropython,cloudformdesign/micropython,torwag/micropython,deshipu/micropython,EcmaXp/micropython,firstval/micropython,
slzatz/micropython,orionrobots/micropython,Timmenem/micropython,ryannathans/micropython,cwyark/micropython,xhat/micropython,ericsnowcurrently/micropython,jimkmc/micropython,MrSurly/micropython-esp32,kerneltask/micropython,danicampora/micropython,ganshun666/micropython,MrSurly/micropython,ernesto-g/micropython,Timmenem/micropython,vitiral/micropython,MrSurly/micropython,henriknelson/micropython,mianos/micropython,dhylands/micropython,methoxid/micropystat,puuu/micropython,dxxb/micropython,noahchense/micropython,dhylands/micropython,chrisdearman/micropython,jmarcelino/pycom-micropython,praemdonck/micropython,tobbad/micropython,dmazzella/micropython,ruffy91/micropython,paul-xxx/micropython,emfcamp/micropython,mhoffma/micropython,ceramos/micropython,redbear/micropython,KISSMonX/micropython,kerneltask/micropython,blmorris/micropython,rubencabrera/micropython,adafruit/micropython,Timmenem/micropython,oopy/micropython,pozetroninc/micropython,Vogtinator/micropython,micropython/micropython-esp32,lowRISC/micropython,vriera/micropython,alex-robbins/micropython,xhat/micropython,infinnovation/micropython,aethaniel/micropython,tralamazza/micropython,aethaniel/micropython,tobbad/micropython,ChuckM/micropython,toolmacher/micropython,lbattraw/micropython,cnoviello/micropython,KISSMonX/micropython,kostyll/micropython,ernesto-g/micropython,warner83/micropython,KISSMonX/micropython,supergis/micropython,turbinenreiter/micropython,infinnovation/micropython,matthewelse/micropython,HenrikSolver/micropython,ericsnowcurrently/micropython,alex-robbins/micropython,bvernoux/micropython,SungEun-Steve-Kim/test-mp,jlillest/micropython,matthewelse/micropython,tdautc19841202/micropython,tuc-osg/micropython,vitiral/micropython,jmarcelino/pycom-micropython,vitiral/micropython,hiway/micropython,lbattraw/micropython,aitjcize/micropython,alex-march/micropython,adafruit/circuitpython,AriZuu/micropython,noahwilliamsson/micropython,lbattraw/micropython,feilongfl/micropython,xuxiaoxin/micropython,ganshun666/m
icropython,dinau/micropython,kerneltask/micropython,mgyenik/micropython,hiway/micropython,mgyenik/micropython,MrSurly/micropython-esp32,ChuckM/micropython,trezor/micropython,noahchense/micropython,heisewangluo/micropython,xyb/micropython,ruffy91/micropython,adamkh/micropython,adafruit/circuitpython,pozetroninc/micropython,SHA2017-badge/micropython-esp32,matthewelse/micropython,Vogtinator/micropython,chrisdearman/micropython,adafruit/circuitpython,emfcamp/micropython,pramasoul/micropython,swegener/micropython,heisewangluo/micropython,SHA2017-badge/micropython-esp32,tobbad/micropython,tuc-osg/micropython,Timmenem/micropython,jlillest/micropython,adafruit/micropython,hiway/micropython,Vogtinator/micropython,MrSurly/micropython-esp32,dhylands/micropython,micropython/micropython-esp32,Peetz0r/micropython-esp32,mianos/micropython,xuxiaoxin/micropython,suda/micropython,PappaPeppar/micropython,noahwilliamsson/micropython,warner83/micropython,selste/micropython,selste/micropython,henriknelson/micropython,selste/micropython,MrSurly/micropython-esp32,methoxid/micropystat,puuu/micropython,torwag/micropython,EcmaXp/micropython,henriknelson/micropython,hiway/micropython,drrk/micropython,heisewangluo/micropython,adafruit/circuitpython,ernesto-g/micropython,tuc-osg/micropython,blmorris/micropython,Vogtinator/micropython,infinnovation/micropython,praemdonck/micropython,suda/micropython,ganshun666/micropython,martinribelotta/micropython,ruffy91/micropython,jimkmc/micropython,trezor/micropython,ericsnowcurrently/micropython,ruffy91/micropython,blazewicz/micropython,jimkmc/micropython,pozetroninc/micropython,KISSMonX/micropython,tdautc19841202/micropython,PappaPeppar/micropython,tuc-osg/micropython,lowRISC/micropython,supergis/micropython,MrSurly/micropython-esp32,TDAbboud/micropython,Timmenem/micropython,utopiaprince/micropython,kerneltask/micropython,blazewicz/micropython,MrSurly/micropython,emfcamp/micropython,tobbad/micropython,supergis/micropython,xyb/micropython,ahotam/micropytho
n,mianos/micropython,drrk/micropython,mgyenik/micropython,PappaPeppar/micropython,pramasoul/micropython,pfalcon/micropython,tdautc19841202/micropython,xyb/micropython,adafruit/circuitpython,heisewangluo/micropython,martinribelotta/micropython,tralamazza/micropython,tuc-osg/micropython,ganshun666/micropython,trezor/micropython,hosaka/micropython,noahchense/micropython,dmazzella/micropython,mhoffma/micropython,martinribelotta/micropython,TDAbboud/micropython,swegener/micropython,cloudformdesign/micropython,micropython/micropython-esp32,neilh10/micropython,omtinez/micropython,bvernoux/micropython,ahotam/micropython,blazewicz/micropython,danicampora/micropython,dinau/micropython,galenhz/micropython,pfalcon/micropython,mpalomer/micropython,skybird6672/micropython,pozetroninc/micropython,jmarcelino/pycom-micropython,toolmacher/micropython,utopiaprince/micropython,pfalcon/micropython,adamkh/micropython,adamkh/micropython,ryannathans/micropython,cnoviello/micropython,suda/micropython,dhylands/micropython,paul-xxx/micropython,xuxiaoxin/micropython,tdautc19841202/micropython,SHA2017-badge/micropython-esp32,SungEun-Steve-Kim/test-mp,xyb/micropython,EcmaXp/micropython,KISSMonX/micropython,matthewelse/micropython,slzatz/micropython,turbinenreiter/micropython,paul-xxx/micropython,alex-march/micropython,chrisdearman/micropython,mgyenik/micropython,suda/micropython,selste/micropython,pramasoul/micropython,toolmacher/micropython,aethaniel/micropython,martinribelotta/micropython,skybird6672/micropython,noahwilliamsson/micropython,adafruit/circuitpython,mianos/micropython,heisewangluo/micropython,tralamazza/micropython,utopiaprince/micropython,kostyll/micropython,jimkmc/micropython,matthewelse/micropython,feilongfl/micropython,mpalomer/micropython,chrisdearman/micropython,ryannathans/micropython,emfcamp/micropython,ruffy91/micropython,redbear/micropython,lowRISC/micropython,dinau/micropython,galenhz/micropython,turbinenreiter/micropython,cnoviello/micropython,alex-robbins/micropython,
kostyll/micropython,HenrikSolver/micropython,turbinenreiter/micropython,xhat/micropython,misterdanb/micropython,galenhz/micropython,TDAbboud/micropython,skybird6672/micropython,torwag/micropython,warner83/micropython,MrSurly/micropython,orionrobots/micropython,pramasoul/micropython,blazewicz/micropython,HenrikSolver/micropython,adamkh/micropython,blazewicz/micropython,pramasoul/micropython,alex-march/micropython,orionrobots/micropython,ryannathans/micropython,mpalomer/micropython,deshipu/micropython,xuxiaoxin/micropython,rubencabrera/micropython,alex-robbins/micropython,torwag/micropython,cwyark/micropython,ericsnowcurrently/micropython,redbear/micropython,firstval/micropython,jimkmc/micropython,hosaka/micropython,vriera/micropython,selste/micropython,omtinez/micropython,cloudformdesign/micropython,cwyark/micropython,Peetz0r/micropython-esp32,neilh10/micropython,paul-xxx/micropython,turbinenreiter/micropython,AriZuu/micropython,mhoffma/micropython,noahwilliamsson/micropython,blmorris/micropython,redbear/micropython,supergis/micropython,dinau/micropython,trezor/micropython,oopy/micropython,AriZuu/micropython,mianos/micropython,ganshun666/micropython,galenhz/micropython,paul-xxx/micropython,praemdonck/micropython,tdautc19841202/micropython,ernesto-g/micropython,misterdanb/micropython,SHA2017-badge/micropython-esp32,stonegithubs/micropython,henriknelson/micropython,jlillest/micropython,mpalomer/micropython,jlillest/micropython,aitjcize/micropython,aitjcize/micropython,rubencabrera/micropython,praemdonck/micropython,toolmacher/micropython,PappaPeppar/micropython,swegener/micropython,mgyenik/micropython,omtinez/micropython,alex-robbins/micropython,slzatz/micropython,vriera/micropython,dxxb/micropython,adafruit/micropython,Peetz0r/micropython-esp32,noahchense/micropython,cnoviello/micropython,martinribelotta/micropython,deshipu/micropython,dinau/micropython,cloudformdesign/micropython,firstval/micropython,trezor/micropython,dhylands/micropython,methoxid/micropystat,dxxb/m
icropython,orionrobots/micropython,mpalomer/micropython,emfcamp/micropython,AriZuu/micropython,ryannathans/micropython,vriera/micropython,deshipu/micropython,jlillest/micropython,dmazzella/micropython,drrk/micropython,bvernoux/micropython,xuxiaoxin/micropython,blmorris/micropython,skybird6672/micropython,feilongfl/micropython,ahotam/micropython,kostyll/micropython,danicampora/micropython,neilh10/micropython,stonegithubs/micropython,vitiral/micropython,oopy/micropython,AriZuu/micropython,adamkh/micropython,dmazzella/micropython,feilongfl/micropython,hiway/micropython,utopiaprince/micropython,cnoviello/micropython,ahotam/micropython,mhoffma/micropython,henriknelson/micropython,micropython/micropython-esp32,mhoffma/micropython,ericsnowcurrently/micropython,cwyark/micropython,vitiral/micropython,alex-march/micropython,warner83/micropython,neilh10/micropython,xyb/micropython,Vogtinator/micropython,supergis/micropython,drrk/micropython,pfalcon/micropython,hosaka/micropython,ceramos/micropython,oopy/micropython,adafruit/micropython,TDAbboud/micropython,chrisdearman/micropython,hosaka/micropython,ernesto-g/micropython,SungEun-Steve-Kim/test-mp,SungEun-Steve-Kim/test-mp,Peetz0r/micropython-esp32,swegener/micropython,torwag/micropython,xhat/micropython | ---
+++
@@ -37,3 +37,10 @@
print(a)
a >>= 1
print(a)
+
+# Test referential integrity of long ints
+a = 0x1ffffffff
+b = a
+a += 1
+print(a)
+print(b) |
1771e1d37f48c62dc20c3a83e480b98cb7c4500c | xoinvader/application.py | xoinvader/application.py | import time
class Application(object):
def __init__(self, startup_args={}):
self._state = None
self._states = {}
self._mspf = None # ms per frame
@property
def state(self):
return self._state
@state.setter
def state(self, name):
if name in self._states:
self._state = self._states[name]
else:
raise KeyError("No such state: '{0}'.".format(name))
def register_state(self, state):
"""Add new state and initiate it with owner."""
name = state.__name__
self._states[name] = state(self)
if len(self._states) == 1:
self._state = self._states[name]
def loop(self):
while True:
start_time = time.perf_counter()
self._state.events()
self._state.update()
self._state.render()
finish_time = time.perf_counter()
delta = finish_time - start_time
if delta <= self._mspf:
time.sleep((self._mspf - delta) / 1000.0)
else:
pass # Log FPS drawdowns.
| import time
class Application(object):
    """State-machine application shell with a stoppable main loop.

    States are registered by class; the first registered state becomes
    current.  ``loop`` drives the active state's events/update/render
    cycle at the configured frame budget until ``stop`` is called.
    """

    def __init__(self, startup_args=None):
        # `startup_args` previously defaulted to a mutable dict ({}),
        # which is shared across calls; use None as the sentinel.
        # The arguments are currently unused.
        self._state = None    # active state instance
        self._states = {}     # state-class name -> instance
        self._mspf = None     # frame budget, ms per frame (never set here)
        self._running = False

    @property
    def running(self):
        """Whether the main loop is currently active."""
        return self._running

    @property
    def state(self):
        """Name of the active state's class.

        Raises:
            AttributeError: if no state has been registered yet.
        """
        if self._state:
            return self._state.__class__.__name__
        else:
            raise AttributeError("There is no available state.")

    @state.setter
    def state(self, name):
        if name in self._states:
            self._state = self._states[name]
        else:
            raise KeyError("No such state: '{0}'.".format(name))

    def register_state(self, state):
        """Add new state and initiate it with owner."""
        name = state.__name__
        self._states[name] = state(self)
        if len(self._states) == 1:
            self._state = self._states[name]

    def stop(self):
        """Request the main loop to exit after the current frame."""
        self._running = False

    def loop(self):
        """Run the main loop until ``stop`` is called.

        Raises:
            AttributeError: if no state has been registered.
        """
        if self._state:
            self._running = True
        else:
            # Fixed typo ("avalable") so the message matches the one
            # raised by the `state` property above.
            raise AttributeError("There is no available state.")

        while self._running:
            start_time = time.perf_counter()

            self._state.events()
            self._state.update()
            self._state.render()

            finish_time = time.perf_counter()
            delta = finish_time - start_time
            # Guard: _mspf may still be None (it is never assigned in
            # __init__); comparing None to a float raises TypeError on
            # Python 3, which crashed the loop on its very first frame.
            if self._mspf is not None and delta <= self._mspf:
                time.sleep((self._mspf - delta) / 1000.0)
            else:
                pass  # Log FPS drawdowns.
| Make loop stopable, some fixes. | Make loop stopable, some fixes.
| Python | mit | pkulev/xoinvader,pankshok/xoinvader | ---
+++
@@ -6,10 +6,18 @@
self._state = None
self._states = {}
self._mspf = None # ms per frame
+ self._running = False
+
+ @property
+ def running(self):
+ return self._running
@property
def state(self):
- return self._state
+ if self._state:
+ return self._state.__class__.__name__
+ else:
+ raise AttributeError("There is no available state.")
@state.setter
def state(self, name):
@@ -25,8 +33,16 @@
if len(self._states) == 1:
self._state = self._states[name]
+ def stop(self):
+ self._running = False
+
def loop(self):
- while True:
+ if self._state:
+ self._running = True
+ else:
+ raise AttributeError("There is no avalable state.")
+
+ while self._running:
start_time = time.perf_counter()
self._state.events() |
98ba566742bb6c4be1783287843183242d48bd41 | gitmostwanted/config.py | gitmostwanted/config.py | # pylint: disable=C1001
class Config():
    """Base configuration: conservative defaults shared by every environment."""
    CELERY_BROKER_URL = ''  # Celery broker URL; filled in per deployment
    SQLALCHEMY_ECHO = False  # presumably SQLAlchemy's echo flag (log emitted SQL) -- verify
    SECRET_KEY = ''  # signing secret; must be set per deployment
    TESTING = False
    DEBUG = False
class ConfigDevelopment(Config):
    """Local development: SQL echo and debug mode enabled."""
    SQLALCHEMY_ECHO = True
    DEBUG = True
class ConfigTesting(Config):
    """Configuration used by the automated test suite."""
    SECRET_KEY = 'testing' # noqa
    TESTING = True
class ConfigProduction(Config):
    """Production: inherits the base defaults unchanged."""
    pass
| # pylint: disable=C1001
class Config:
    """Base configuration shared by every environment.

    Note that DEBUG defaults to True here and is only switched off by
    ConfigProduction below.
    """
    CELERY_BROKER_URL = ''  # Celery broker URL; filled in per deployment
    SQLALCHEMY_ECHO = False  # presumably SQLAlchemy's echo flag (log emitted SQL) -- verify
    SECRET_KEY = ''  # signing secret; must be set per deployment
    TESTING = False
    DEBUG = True  # NOTE(review): debug-on base default; any new environment inherits it
class ConfigDevelopment(Config):
    """Local development: adds SQL echo on top of the inherited debug default."""
    SQLALCHEMY_ECHO = True
class ConfigTesting(Config):
    """Configuration used by the automated test suite (DEBUG stays True)."""
    SECRET_KEY = 'testing' # noqa
    TESTING = True
class ConfigProduction(Config):
    """Production: the only environment with DEBUG disabled."""
    DEBUG = False
| DEBUG = True for the Testing env | DEBUG = True for the Testing env
| Python | mit | kkamkou/gitmostwanted.com,kkamkou/gitmostwanted.com,kkamkou/gitmostwanted.com,kkamkou/gitmostwanted.com | ---
+++
@@ -1,15 +1,14 @@
# pylint: disable=C1001
-class Config():
+class Config:
CELERY_BROKER_URL = ''
SQLALCHEMY_ECHO = False
SECRET_KEY = ''
TESTING = False
- DEBUG = False
+ DEBUG = True
class ConfigDevelopment(Config):
SQLALCHEMY_ECHO = True
- DEBUG = True
class ConfigTesting(Config):
@@ -18,4 +17,4 @@
class ConfigProduction(Config):
- pass
+ DEBUG = False |
ea46030784640d86a70c382ece7913eeb0996ba9 | 01_dataprep/trn_to_phn.py | 01_dataprep/trn_to_phn.py | #!/usr/bin/env python3
import os
import sys
def main(trn_file, phn_dir):
    """Expand a .trn transcription file into per-utterance triphone files.

    Reads ``data/phone_map`` (one "grapheme phone-string" pair per line)
    and a .trn file whose lines are ``word word ... (utterance_id)``,
    then writes ``<phn_dir>/<utterance_id>.phn`` with one ``0 0 <label>``
    line per phone, each label a ``left-phone+right`` triphone.  Word
    boundaries become ``_`` and each file is framed by ``__`` markers.
    """
    # Use context managers so the phone map, transcript and output files
    # are closed even on error (the original leaked all three handles).
    with open('data/phone_map', encoding='utf-8') as map_file:
        phone_map = {v[0]: v[1].strip() for v in (l.split(None, 1) for l in map_file)}

    with open(trn_file) as trn:
        for line in trn:
            parts = line.split()
            sentence = parts[:-1]
            sid = parts[-1][1:-1]  # strip the parentheses around "(utterance_id)"
            with open(os.path.join(phn_dir, sid + ".phn"), "w", encoding="iso8859-15") as phn:
                print("0 0 __", file=phn)
                # Build the flat phone string, '_' marking word boundaries.
                phones = '_'
                for word in sentence:
                    for c in word:
                        phones += phone_map[c]
                    phones += '_'
                # Emit each interior position with its nearest non-'_' context.
                for j in range(1, len(phones) - 1):
                    if phones[j] == '_':
                        print("0 0 _", file=phn)
                        continue
                    lci = j - 1
                    while lci > 0 and phones[lci] == '_':
                        lci -= 1
                    rci = j + 1
                    while rci < len(phones) - 1 and phones[rci] == '_':
                        rci += 1
                    print("0 0 {}-{}+{}".format(phones[lci], phones[j], phones[rci]), file=phn)
                print("0 0 __", file=phn)
if __name__ == "__main__":
main(sys.argv[1], sys.argv[2]) | #!/usr/bin/env python3
import os
import sys
def main(langdat_dir, trn_file, phn_dir):
    """Expand a .trn transcription file into per-utterance triphone files.

    Reads ``<langdat_dir>/phones`` (one "grapheme phone-string" pair per
    line) and a .trn file whose lines are ``word word ... (utterance_id)``,
    then writes ``<phn_dir>/<utterance_id>.phn`` with one ``0 0 <label>``
    line per phone, each label a ``left-phone+right`` triphone.  Word
    boundaries become ``_`` and each file is framed by ``__`` markers.
    """
    # Use context managers so the phone map, transcript and output files
    # are closed even on error (the original leaked all three handles).
    with open('{}/phones'.format(langdat_dir), encoding='utf-8') as map_file:
        phone_map = {v[0]: v[1].strip() for v in (l.split(None, 1) for l in map_file)}

    with open(trn_file) as trn:
        for line in trn:
            parts = line.split()
            sentence = parts[:-1]
            sid = parts[-1][1:-1]  # strip the parentheses around "(utterance_id)"
            with open(os.path.join(phn_dir, sid + ".phn"), "w", encoding="iso8859-15") as phn:
                print("0 0 __", file=phn)
                # Build the flat phone string, '_' marking word boundaries.
                phones = '_'
                for word in sentence:
                    for c in word:
                        phones += phone_map[c]
                    phones += '_'
                # Emit each interior position with its nearest non-'_' context.
                for j in range(1, len(phones) - 1):
                    if phones[j] == '_':
                        print("0 0 _", file=phn)
                        continue
                    lci = j - 1
                    while lci > 0 and phones[lci] == '_':
                        lci -= 1
                    rci = j + 1
                    while rci < len(phones) - 1 and phones[rci] == '_':
                        rci += 1
                    print("0 0 {}-{}+{}".format(phones[lci], phones[j], phones[rci]), file=phn)
                print("0 0 __", file=phn)
if __name__ == "__main__":
main(sys.argv[1], sys.argv[2], sys.argv[3]) | Fix trn to phn script | Fix trn to phn script
| Python | bsd-3-clause | phsmit/iwclul2016-scripts,phsmit/iwclul2016-scripts,phsmit/iwclul2016-scripts | ---
+++
@@ -3,8 +3,8 @@
import sys
-def main(trn_file, phn_dir):
- phone_map = {v[0]: v[1].strip() for v in (l.split(None, 1) for l in open('data/phone_map', encoding='utf-8'))}
+def main(langdat_dir, trn_file, phn_dir):
+ phone_map = {v[0]: v[1].strip() for v in (l.split(None, 1) for l in open('{}/phones'.format(langdat_dir), encoding='utf-8'))}
for line in open(trn_file):
parts = line.split()
@@ -43,4 +43,4 @@
if __name__ == "__main__":
- main(sys.argv[1], sys.argv[2])
+ main(sys.argv[1], sys.argv[2], sys.argv[3]) |
03ad5a9e31127828ce7f14de61af80af20362624 | test/field/test_date.py | test/field/test_date.py | # encoding: utf-8
from __future__ import unicode_literals
from common import FieldExam
from marrow.mongo.field import Date
class TestDateField(FieldExam):
    """Run the shared FieldExam checks against the Date field type."""
    __field__ = Date  # field class under test; presumably consumed by FieldExam fixtures -- see common.py
| # encoding: utf-8
from __future__ import unicode_literals
from datetime import datetime
from bson import ObjectId
from common import FieldExam
from marrow.mongo.field import Date
class TestDateField(FieldExam):
    """Run the shared FieldExam checks against the Date field type."""
    __field__ = Date  # field class under test; presumably consumed by FieldExam fixtures -- see common.py
    def test_date_like_oid(self, Sample):
        # The leading 4 bytes of this ObjectId, 0x58684680, are the Unix
        # timestamp 1483228800 = 2017-01-01T00:00:00Z, so assigning the
        # ObjectId to a Date field should recover that moment.  tzinfo is
        # dropped before comparing so the check passes whether the stored
        # value is timezone-aware or naive.
        oid = ObjectId('586846800000000000000000')
        assert Sample(oid).field.replace(tzinfo=None) == datetime(2017, 1, 1)
| Add test for extraction of dates from ObjectIds. | Add test for extraction of dates from ObjectIds.
| Python | mit | marrow/mongo | ---
+++
@@ -2,9 +2,17 @@
from __future__ import unicode_literals
+from datetime import datetime
+
+from bson import ObjectId
from common import FieldExam
from marrow.mongo.field import Date
class TestDateField(FieldExam):
__field__ = Date
+
+ def test_date_like_oid(self, Sample):
+ oid = ObjectId('586846800000000000000000')
+
+ assert Sample(oid).field.replace(tzinfo=None) == datetime(2017, 1, 1) |
467a359445dd5b1f20a4d622d60a0970d08fc1f1 | samklang_pages/admin.py | samklang_pages/admin.py | from django.contrib import admin
from samklang_pages.models import Page, PageWidget
from samklang_pages.forms import PageForm
class PageWidgetInline(admin.TabularInline):
    """Tabular inline so a Page's widgets can be edited on its change form."""
    model = PageWidget
class PageAdminForm(PageForm):
    """Admin-specific variant of PageForm limited to the listed fields."""
    class Meta:
        fields = ('url', 'name', 'content', 'site', 'user', 'group', 'admingroup',)# 'document_class')
class PageAdmin(admin.ModelAdmin):
    """Page admin: list and search by url/name, widgets edited inline."""
    form = PageAdminForm
    list_display = ('url', 'name')
    search_fields = ('url', 'name')
    inlines = [
        PageWidgetInline,
    ]
admin.site.register(Page, PageAdmin)
class PageWidgetAdmin(admin.ModelAdmin):
    """Default admin allowing PageWidget objects to be edited standalone."""
    pass
admin.site.register(PageWidget, PageWidgetAdmin)
| from django.contrib import admin
from samklang_pages.models import Page, PageWidget
from samklang_pages.forms import PageForm
class PageWidgetInline(admin.TabularInline):
    """Tabular inline so a Page's widgets can be edited on its change form."""
    model = PageWidget
class PageAdminForm(PageForm):
    """Admin-specific variant of PageForm limited to the listed fields."""
    class Meta:
        fields = ('url', 'name', 'content', 'site', 'user', 'group', 'admingroup',)# 'document_class')
class PageAdmin(admin.ModelAdmin):
    """Page admin: list and search by url/name, widgets edited inline."""
    form = PageAdminForm
    list_display = ('url', 'name')
    search_fields = ('url', 'name')
    inlines = [
        PageWidgetInline,
    ]
admin.site.register(Page, PageAdmin)
class PageWidgetAdmin(admin.ModelAdmin):
    """Admin for standalone PageWidget editing (registration disabled below)."""
    pass
# Standalone PageWidget administration is deliberately disabled for now;
# widgets remain reachable only through the inline on PageAdmin above.
#admin.site.register(PageWidget, PageWidgetAdmin)
| Hide pagewidgets away for now | Hide pagewidgets away for now
| Python | agpl-3.0 | sigurdga/samklang-pages,sigurdga/samklang-pages | ---
+++
@@ -23,5 +23,5 @@
class PageWidgetAdmin(admin.ModelAdmin):
pass
-admin.site.register(PageWidget, PageWidgetAdmin)
+#admin.site.register(PageWidget, PageWidgetAdmin)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.