commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
835aa149e4bccd7bcf94390d1a878133b79b768f | yaacl/models.py | yaacl/models.py | # -*- coding: utf-8 -*-
from datetime import datetime
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class ACL(models.Model):
acl_list = {}
resource = models.CharField(
_("Resource name"),
max_length=255,
db_index=True,
)
display = models.CharField(
_("displayed name"),
max_length=255,
null=True,
blank=True,
)
created_at = models.DateTimeField(
_("Creation time"),
default=datetime.now(),
)
is_available = models.BooleanField(
_("Is available to assign"),
default=True,
)
class Meta:
app_label = 'yaacl'
def __str__(self):
if self.display:
return "%s (%s)" % (self.display, self.resource)
else:
return self.resource
| # -*- coding: utf-8 -*-
from datetime import datetime
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class ACL(models.Model):
acl_list = {}
resource = models.CharField(
_("Resource name"),
max_length=255,
db_index=True,
)
display = models.CharField(
_("displayed name"),
max_length=255,
null=True,
blank=True,
)
created_at = models.DateTimeField(
_("Creation time"),
auto_now_add=True,
)
is_available = models.BooleanField(
_("Is available to assign"),
default=True,
)
class Meta:
app_label = 'yaacl'
def __str__(self):
if self.display:
return "%s (%s)" % (self.display, self.resource)
else:
return self.resource
| Use `auto_now_add` to make ACL.created_at timezone aware | Use `auto_now_add` to make ACL.created_at timezone aware
| Python | mit | Alkemic/yaACL,Alkemic/yaACL | ---
+++
@@ -22,7 +22,7 @@
)
created_at = models.DateTimeField(
_("Creation time"),
- default=datetime.now(),
+ auto_now_add=True,
)
is_available = models.BooleanField(
_("Is available to assign"), |
3507d71223122a72d8e71fbf30849586485b0790 | manage.py | manage.py | # -*- coding: utf-8 -*-
from flask import current_app, g
from flask.ext.script import Manager, Server, prompt_bool
from massa import create_app
manager = Manager(create_app)
manager.add_option('-c', '--config', dest='config', required=False)
manager.add_command('runserver', Server(
use_debugger = True,
use_reloader = True,
host = '0.0.0.0',
port = 5000,
))
@manager.command
def db_create_tables():
"""Create all the db tables."""
current_app.preprocess_request()
db = g.sl('db')
db.create_tables()
@manager.command
def db_drop_tables():
"""Drop all the db tables."""
if prompt_bool('Are you sure you want to drop all the db tables?'):
current_app.preprocess_request()
db = g.sl('db')
db.drop_tables()
@manager.command
def db_recreate_tables():
"""Drop and create all the db tables."""
if prompt_bool('Are you sure you want to recreate all the db tables?'):
current_app.preprocess_request()
db = g.sl('db')
db.drop_tables()
db.create_tables()
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import current_app, g
from flask.ext.script import Manager, Server, prompt_bool
from massa import create_app
manager = Manager(create_app)
manager.add_option('-c', '--config', dest='config', required=False)
manager.add_command('runserver', Server(
use_debugger = True,
use_reloader = True,
host = '0.0.0.0',
port = 5000,
))
@manager.command
def db_create_tables():
"""Create all the db tables."""
current_app.preprocess_request()
db = g.sl('db')
db.create_tables()
@manager.command
def db_drop_tables():
"""Drop all the db tables."""
if prompt_bool('Are you sure you want to drop all the db tables?'):
current_app.preprocess_request()
db = g.sl('db')
db.drop_tables()
@manager.command
def db_recreate_tables():
"""Drop and create all the db tables."""
if prompt_bool('Are you sure you want to recreate all the db tables?'):
current_app.preprocess_request()
db = g.sl('db')
db.drop_tables()
db.create_tables()
if __name__ == '__main__':
manager.run()
| Add a shebang for a python interpreter. | Add a shebang for a python interpreter. | Python | mit | jaapverloop/massa | ---
+++
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
# -*- coding: utf-8 -*-
from flask import current_app, g |
9685ab2793ad9dc79df5c6f1bd1c22b302769b2c | py/garage/garage/asyncs/utils.py | py/garage/garage/asyncs/utils.py | __all__ = [
'CircuitBreaker',
'timer',
]
import asyncio
import collections
import time
class CircuitBreaker:
"""Break (disconnect) when no less than `count` errors happened
within last `period` seconds.
"""
class Disconnected(Exception):
pass
def __init__(self, *, count, period, clock=None):
self.timestamps = collections.deque(maxlen=count)
self.period = period
self.clock = clock or time.monotonic
@property
def connected(self):
if len(self.timestamps) < self.timestamps.maxlen:
return True
if self.timestamps[0] + self.period < self.clock():
return True
return False
def count(self, raises=Disconnected):
self.timestamps.append(self.clock())
if raises and not self.connected:
raise raises
async def timer(timeout, *, raises=asyncio.TimeoutError, loop=None):
"""Wait until timeout. If timeout is None or negative, wait forever."""
if timeout is None or timeout < 0:
await asyncio.Event(loop=loop).wait() # Wait forever.
else:
await asyncio.sleep(timeout, loop=loop)
if raises:
raise raises
| __all__ = [
'CircuitBreaker',
'timer',
]
import asyncio
import collections
import time
class CircuitBreaker:
"""Break (disconnect) when no less than `count` errors happened
within last `period` seconds.
"""
class Disconnected(Exception):
pass
def __init__(self, *, count, period, clock=None):
self.timestamps = collections.deque(maxlen=count)
self.period = period
self.clock = clock or time.monotonic
@property
def connected(self):
if len(self.timestamps) < self.timestamps.maxlen:
return True
if self.timestamps[0] + self.period < self.clock():
return True
return False
def count(self, raises=Disconnected):
self.timestamps.append(self.clock())
if self.connected:
return True
elif raises:
raise raises
else:
return False
async def timer(timeout, *, raises=asyncio.TimeoutError, loop=None):
"""Wait until timeout. If timeout is None or negative, wait forever."""
if timeout is None or timeout < 0:
await asyncio.Event(loop=loop).wait() # Wait forever.
else:
await asyncio.sleep(timeout, loop=loop)
if raises:
raise raises
| Make CircuitBreaker.count return connection status | Make CircuitBreaker.count return connection status
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | ---
+++
@@ -31,8 +31,12 @@
def count(self, raises=Disconnected):
self.timestamps.append(self.clock())
- if raises and not self.connected:
+ if self.connected:
+ return True
+ elif raises:
raise raises
+ else:
+ return False
async def timer(timeout, *, raises=asyncio.TimeoutError, loop=None): |
fee9504387319dd406eb5131281c6344a427fad7 | insanity/layers.py | insanity/layers.py | import numpy as np
import theano
import theano.tensor as T
from theano.tensor.nnet import conv
from theano.tensor.nnet import softmax
from theano.tensor import shared_randomstreams
from theano.tensor.signal import downsample
class FullyConnectedLayer(object):
def __init__(self, previousLayer, numNeurons, activation, miniBatchSize, dropout=0.0):
self.numNeurons = numNeurons
self.activation = activation
self.dropout = dropout
#Initialize weights and biases.
self.weights = theano.shared(
np.asarray(
np.random.normal(
loc=0.0, scale=np.sqrt(1.0/n_out), size=(previousLayer.numNeurons, self.numNeurons)),
dtype=theano.config.floatX),
name='weights', borrow=True)
self.biases = theano.shared(
np.asarray(np.random.normal(loc=0.0, scale=1.0, size=(self.numNeurons,)),
dtype=theano.config.floatX),
name='biases', borrow=True)
self.learningParams = [self.weights, self.biases]
#Configure layer processing procedure.
something = previousLayer.output
somethingElse = previousLayer.outputDropout
self.output = 0
self.outputDropout = 0
| import numpy as np
import theano
import theano.tensor as T
from theano.tensor.nnet import conv
from theano.tensor.nnet import softmax
from theano.tensor import shared_randomstreams
from theano.tensor.signal import downsample
class FullyConnectedLayer(object):
def __init__(self, previousLayer, numNeurons, activation, miniBatchSize, dropout=0.0):
self.numNeurons = numNeurons
self.activation = activation
self.dropout = dropout
#Initialize weights and biases.
self.weights = theano.shared(
np.asarray(
np.random.normal(
loc=0.0, scale=np.sqrt(1.0/n_out), size=(previousLayer.numNeurons, self.numNeurons)),
dtype=theano.config.floatX),
name='weights', borrow=True)
self.biases = theano.shared(
np.asarray(np.random.normal(loc=0.0, scale=1.0, size=(self.numNeurons,)),
dtype=theano.config.floatX),
name='biases', borrow=True)
self.learningParams = [self.weights, self.biases]
#Configure non-dropout processing.
self.input = previousLayer.output.reshape((miniBatchSize, previousLayer.numNeurons))
self.output = self.activation((1-self.dropout)*T.dot(self.input, self.weights) + self.biases)
#Configure dropout processing.
self.inputDropout = dropoutLayer(inptDropout.reshape((miniBatchSize, previousLayer.numNeurons)), self.dropout)
self.outputDropout = self.activation(T.dot(self.inputDropout, self.weights) + self.biases)
| Add processing procedure to FullyConnectedLayer. | Add processing procedure to FullyConnectedLayer.
| Python | cc0-1.0 | cn04/insanity | ---
+++
@@ -28,8 +28,10 @@
self.learningParams = [self.weights, self.biases]
- #Configure layer processing procedure.
- something = previousLayer.output
- somethingElse = previousLayer.outputDropout
- self.output = 0
- self.outputDropout = 0
+ #Configure non-dropout processing.
+ self.input = previousLayer.output.reshape((miniBatchSize, previousLayer.numNeurons))
+ self.output = self.activation((1-self.dropout)*T.dot(self.input, self.weights) + self.biases)
+
+ #Configure dropout processing.
+ self.inputDropout = dropoutLayer(inptDropout.reshape((miniBatchSize, previousLayer.numNeurons)), self.dropout)
+ self.outputDropout = self.activation(T.dot(self.inputDropout, self.weights) + self.biases) |
290bd79ddd3108e0b66822c4bf997cc5dd2e765d | deployment/datapusher_settings.py | deployment/datapusher_settings.py | import uuid
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# database
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/datapusher'
# webserver host and port
HOST = '0.0.0.0'
PORT = 8800
# logging
#FROM_EMAIL = 'server-error@example.com'
#ADMINS = ['yourname@example.com'] # where to send emails
LOG_FILE = '/tmp/ckan_datapusher_service.log'
STDERR = True | import uuid
import os
DEBUG = False
TESTING = False
SECRET_KEY = str(uuid.uuid4())
USERNAME = str(uuid.uuid4())
PASSWORD = str(uuid.uuid4())
NAME = 'datapusher'
# database
SQLALCHEMY_DATABASE_URI = 'postgresql://%s@localhost/%s' % (
os.environ['CKAN_DATAPUSHER'],
os.environ['CKAN_DATAPUSHER_DB'],
)
# webserver host and port
HOST = '0.0.0.0'
PORT = 8800
# logging
#FROM_EMAIL = 'server-error@example.com'
#ADMINS = ['yourname@example.com'] # where to send emails
LOG_FILE = '/tmp/ckan_datapusher_service.log'
STDERR = True | Use postgresql as jobstorage for datapusher-srv | Use postgresql as jobstorage for datapusher-srv
| Python | agpl-3.0 | ESRC-CDRC/ckan-datapusher-service | ---
+++
@@ -1,4 +1,5 @@
import uuid
+import os
DEBUG = False
TESTING = False
@@ -10,7 +11,10 @@
# database
-SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/datapusher'
+SQLALCHEMY_DATABASE_URI = 'postgresql://%s@localhost/%s' % (
+ os.environ['CKAN_DATAPUSHER'],
+ os.environ['CKAN_DATAPUSHER_DB'],
+)
# webserver host and port
|
4425aa1170a1acd3ed69c32ba5e3885301593524 | salt/returners/redis_return.py | salt/returners/redis_return.py | '''
Return data to a redis server
To enable this returner the minion will need the python client for redis
installed and the following values configured in the minion or master
config, these are the defaults:
redis.db: '0'
redis.host: 'salt'
redis.port: 6379
'''
# Import python libs
import json
try:
import redis
has_redis = True
except ImportError:
has_redis = False
def __virtual__():
if not has_redis:
return False
return 'redis_return'
def _get_serv():
'''
Return a redis server object
'''
return redis.Redis(
host=__salt__['config.option']('redis.host'),
port=__salt__['config.option']('redis.port'),
db=__salt__['config.option']('redis.db'))
def returner(ret):
'''
Return data to a redis data store
'''
serv = _get_serv()
serv.sadd('{0}:jobs'.format(ret['id']))
serv.set('{0}:{1}'.format(ret['jid'], json.dumps(ret['return'])))
serv.sadd('jobs', ret['jid'])
serv.sadd(ret['jid'], ret['id'])
| '''
Return data to a redis server
To enable this returner the minion will need the python client for redis
installed and the following values configured in the minion or master
config, these are the defaults:
redis.db: '0'
redis.host: 'salt'
redis.port: 6379
'''
# Import python libs
import json
try:
import redis
has_redis = True
except ImportError:
has_redis = False
def __virtual__():
if not has_redis:
return False
return 'redis_return'
def _get_serv():
'''
Return a redis server object
'''
return redis.Redis(
host=__salt__['config.option']('redis.host'),
port=__salt__['config.option']('redis.port'),
db=__salt__['config.option']('redis.db'))
def returner(ret):
'''
Return data to a redis data store
'''
serv = _get_serv()
serv.set('{0}:{1}'.format(ret['id'], ret['jid']), json.dumps(ret))
serv.lpush('{0}:{1}'.format(ret['id'], ret['fun']), ret['jid'])
serv.sadd('minions', ret['id'])
| Restructure redis returner, since it did notwork before anyway | Restructure redis returner, since it did notwork before anyway
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | ---
+++
@@ -41,7 +41,6 @@
Return data to a redis data store
'''
serv = _get_serv()
- serv.sadd('{0}:jobs'.format(ret['id']))
- serv.set('{0}:{1}'.format(ret['jid'], json.dumps(ret['return'])))
- serv.sadd('jobs', ret['jid'])
- serv.sadd(ret['jid'], ret['id'])
+ serv.set('{0}:{1}'.format(ret['id'], ret['jid']), json.dumps(ret))
+ serv.lpush('{0}:{1}'.format(ret['id'], ret['fun']), ret['jid'])
+ serv.sadd('minions', ret['id']) |
b2d9b56ceb96718d1f3edc8ec019ca7218e33e7d | src/rnaseq_lib/math/__init__.py | src/rnaseq_lib/math/__init__.py | import numpy as np
# Outlier
def iqr_bounds(ys):
"""
Return upper and lower bound for an array of values
Lower bound: Q1 - (IQR * 1.5)
Upper bound: Q3 + (IQR * 1.5)
:param list ys: List of values to calculate IQR
:return: Upper and lower bound
:rtype: tuple(float, float)
"""
quartile_1, quartile_3 = np.percentile(ys, [25, 75])
iqr = quartile_3 - quartile_1
lower_bound = quartile_1 - (iqr * 1.5)
upper_bound = quartile_3 + (iqr * 1.5)
return upper_bound, lower_bound
# Normalization
def min_max_normalize(df):
return (df - df.min()) / (df.max() - df.min())
def mean_normalize(df):
return (df - df.mean()) / df.std()
def l2norm(x, pad=0.001):
"""
Log2 normalization function
:param float x: Input value
:param int|float pad: Pad value (to handle zeros)
:return: log2(x+1) normalized value
:rtype: float
"""
return np.log2(x + pad)
| import numpy as np
# Outlier
def iqr_bounds(ys):
"""
Return upper and lower bound for an array of values
Lower bound: Q1 - (IQR * 1.5)
Upper bound: Q3 + (IQR * 1.5)
:param list ys: List of values to calculate IQR
:return: Upper and lower bound
:rtype: tuple(float, float)
"""
quartile_1, quartile_3 = np.percentile(ys, [25, 75])
iqr = quartile_3 - quartile_1
lower_bound = quartile_1 - (iqr * 1.5)
upper_bound = quartile_3 + (iqr * 1.5)
return upper_bound, lower_bound
# Normalization
def min_max_normalize(df):
return (df - df.min()) / (df.max() - df.min())
def mean_normalize(df):
return (df - df.mean()) / df.std()
def softmax(df):
"""
Normalizes columns to sum to 1
:param pd.DataFrame df: Dataframe to normalize
:return: Normalized DataFrame
:rtype: pd.DataFrame
"""
return df.divide(df.sum())
def l2norm(x, pad=0.001):
"""
Log2 normalization function
:param float x: Input value
:param int|float pad: Pad value (to handle zeros)
:return: log2(x+1) normalized value
:rtype: float
"""
return np.log2(x + pad)
| Add docstring for softmax normalization function | Add docstring for softmax normalization function
| Python | mit | jvivian/rnaseq-lib,jvivian/rnaseq-lib | ---
+++
@@ -29,6 +29,17 @@
return (df - df.mean()) / df.std()
+def softmax(df):
+ """
+ Normalizes columns to sum to 1
+
+ :param pd.DataFrame df: Dataframe to normalize
+ :return: Normalized DataFrame
+ :rtype: pd.DataFrame
+ """
+ return df.divide(df.sum())
+
+
def l2norm(x, pad=0.001):
"""
Log2 normalization function |
78a9ee621e20bf1fe930bd0d2046715c5737df03 | web/patlms-web/web/urls.py | web/patlms-web/web/urls.py | """web URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^general/', include('general.urls')),
url(r'^admin/', include(admin.site.urls)),
# index
url(r'^$', 'general.views.status', name='index'),
]
| """web URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^general/', include('general.urls', namespace='general')),
url(r'^admin/', include(admin.site.urls)),
# index
url(r'^$', 'general.views.status', name='index'),
]
| Add general module to general namespace in web module | Add general module to general namespace in web module
| Python | mit | chyla/slas,chyla/pat-lms,chyla/pat-lms,chyla/pat-lms,chyla/pat-lms,chyla/pat-lms,chyla/pat-lms,chyla/slas,chyla/slas,chyla/slas,chyla/slas,chyla/pat-lms,chyla/slas,chyla/slas | ---
+++
@@ -16,7 +16,7 @@
from django.contrib import admin
urlpatterns = [
- url(r'^general/', include('general.urls')),
+ url(r'^general/', include('general.urls', namespace='general')),
url(r'^admin/', include(admin.site.urls)),
# index |
4dbe38996e5bfd6b3f12be1f9cde8de379108934 | keysmith.py | keysmith.py | #!/usr/bin/env python
from __future__ import print_function
import argparse
import os
import random
import sys
def natural_int(x):
x = int(x)
if x < 0:
raise argparse.ArgumentTypeError(str(x) + ' is not a natural number.')
return x
def random_word(words):
""" Generate a random word. """
return random.SystemRandom().choice(words)
def random_key(words, degree):
""" Generate a random key. """
key = ''
for i in range(degree):
key += random_word(words).strip()
return key
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'-d', '--degree',
help='Specify the number of words to include.',
type=natural_int,
default=3
)
parser.add_argument(
'-s', '--source',
help='Specify the word list to use.',
default=os.path.join(os.path.dirname(sys.argv[0]), 'word.list')
)
args = parser.parse_args()
with open(args.source, 'r') as source:
words = source.readlines()
print(random_key(words, args.degree))
| #!/usr/bin/env python
from __future__ import print_function
import argparse
import os
import random
import sys
def natural_int(x):
x = int(x)
if x < 0:
raise argparse.ArgumentTypeError(str(x) + ' is not a natural number.')
return x
def random_word(words):
""" Generate a random word. """
return random.SystemRandom().choice(words)
def random_key(words, degree):
""" Generate a random key. """
key = ''
for i in range(degree):
key += random_word(words).strip()
return key
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Authentication Token Generator'
)
parser.add_argument(
'-d', '--degree',
help='Specify the number of words to include.',
type=natural_int,
default=3
)
parser.add_argument(
'-s', '--source',
help='Specify the word list to use.',
default=os.path.join(os.path.dirname(sys.argv[0]), 'word.list')
)
args = parser.parse_args()
with open(args.source, 'r') as source:
words = source.readlines()
print(random_key(words, args.degree))
| Add a description to the argparser. | Add a description to the argparser.
| Python | bsd-3-clause | dmtucker/keysmith | ---
+++
@@ -24,7 +24,9 @@
return key
if __name__ == '__main__':
- parser = argparse.ArgumentParser()
+ parser = argparse.ArgumentParser(
+ description='Authentication Token Generator'
+ )
parser.add_argument(
'-d', '--degree',
help='Specify the number of words to include.', |
f5a5f185958ed3088518f3a2fca15ff7b57e982c | manage.py | manage.py | # manage.py
import os
import subprocess
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager, Shell
from app import create_app, db
from app.models import Users, Agencies, Requests, Responses, Events, Reasons, Permissions, Roles
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(
app=app,
db=db,
Users=Users,
Agencies=Agencies,
Requests=Requests,
Responses=Responses,
Events=Events,
Reasons=Reasons,
Permissions=Permissions,
Roles=Roles
)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager
def celery():
subprocess.call(['celery', 'worker', '-A', 'celery_worker.celery', '--loglevel=info'])
if __name__ == "__main__":
manager.run()
| # manage.py
import os
import subprocess
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager, Shell, Command
from app import create_app, db
from app.models import Users, Agencies, Requests, Responses, Events, Reasons, Permissions, Roles
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
class Celery(Command):
"""
Runs Celery
"""
def run(self):
subprocess.call(['celery', 'worker', '-A', 'celery_worker.celery', '--loglevel=info'])
def make_shell_context():
return dict(
app=app,
db=db,
Users=Users,
Agencies=Agencies,
Requests=Requests,
Responses=Responses,
Events=Events,
Reasons=Reasons,
Permissions=Permissions,
Roles=Roles
)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command("db", MigrateCommand)
manager.add_command("celery", Celery())
if __name__ == "__main__":
manager.run()
| Fix a problem with the celery cli commoand Allows runserver to be used separately from celery. | Fix a problem with the celery cli commoand
Allows runserver to be used separately from celery.
| Python | apache-2.0 | CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords | ---
+++
@@ -3,7 +3,7 @@
import subprocess
from flask_migrate import Migrate, MigrateCommand
-from flask_script import Manager, Shell
+from flask_script import Manager, Shell, Command
from app import create_app, db
from app.models import Users, Agencies, Requests, Responses, Events, Reasons, Permissions, Roles
@@ -11,6 +11,15 @@
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
+
+
+class Celery(Command):
+ """
+ Runs Celery
+ """
+
+ def run(self):
+ subprocess.call(['celery', 'worker', '-A', 'celery_worker.celery', '--loglevel=info'])
def make_shell_context():
@@ -28,11 +37,8 @@
)
manager.add_command("shell", Shell(make_context=make_shell_context))
-manager.add_command('db', MigrateCommand)
-
-@manager
-def celery():
- subprocess.call(['celery', 'worker', '-A', 'celery_worker.celery', '--loglevel=info'])
+manager.add_command("db", MigrateCommand)
+manager.add_command("celery", Celery())
if __name__ == "__main__": |
f511eb39aef5005df6ac5d234d1da7ae829983f9 | manage.py | manage.py | #!/usr/bin/env python
import os
import sys
from website.app import init_app
if __name__ == "__main__":
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.base.settings')
from django.core.management import execute_from_command_line
init_app(set_backends=True, routes=False, attach_request_handlers=False)
if 'livereload' in sys.argv:
from django.core.wsgi import get_wsgi_application
from livereload import Server
import django.conf as conf
conf.settings.STATIC_URL = '/static/'
application = get_wsgi_application()
server = Server(application)
server.watch('api/')
server.serve(port=8000)
else:
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.base.settings')
from django.core.management import execute_from_command_line
from website.app import init_app
init_app(set_backends=True, routes=False, attach_request_handlers=False)
if 'livereload' in sys.argv:
from django.core.wsgi import get_wsgi_application
from livereload import Server
import django.conf as conf
conf.settings.STATIC_URL = '/static/'
application = get_wsgi_application()
server = Server(application)
server.watch('api/')
server.serve(port=8000)
else:
execute_from_command_line(sys.argv)
| Move init_app after DJANGO_SETTINGS_MODULE set up | Move init_app after DJANGO_SETTINGS_MODULE set up
| Python | apache-2.0 | GageGaskins/osf.io,njantrania/osf.io,kch8qx/osf.io,njantrania/osf.io,zachjanicki/osf.io,felliott/osf.io,ZobairAlijan/osf.io,DanielSBrown/osf.io,adlius/osf.io,felliott/osf.io,rdhyee/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,crcresearch/osf.io,alexschiller/osf.io,doublebits/osf.io,baylee-d/osf.io,acshi/osf.io,emetsger/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,kwierman/osf.io,jmcarp/osf.io,pattisdr/osf.io,GageGaskins/osf.io,TomBaxter/osf.io,baylee-d/osf.io,binoculars/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,KAsante95/osf.io,danielneis/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,billyhunt/osf.io,emetsger/osf.io,alexschiller/osf.io,mluke93/osf.io,mattclark/osf.io,aaxelb/osf.io,brandonPurvis/osf.io,cosenal/osf.io,haoyuchen1992/osf.io,aaxelb/osf.io,Nesiehr/osf.io,jnayak1/osf.io,emetsger/osf.io,ticklemepierce/osf.io,cslzchen/osf.io,binoculars/osf.io,zachjanicki/osf.io,mattclark/osf.io,samchrisinger/osf.io,pattisdr/osf.io,zamattiac/osf.io,danielneis/osf.io,icereval/osf.io,MerlinZhang/osf.io,jnayak1/osf.io,samchrisinger/osf.io,saradbowman/osf.io,monikagrabowska/osf.io,jmcarp/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,Ghalko/osf.io,kch8qx/osf.io,mluke93/osf.io,wearpants/osf.io,doublebits/osf.io,njantrania/osf.io,caseyrollins/osf.io,caneruguz/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,mluo613/osf.io,leb2dg/osf.io,mattclark/osf.io,brianjgeiger/osf.io,samanehsan/osf.io,zamattiac/osf.io,acshi/osf.io,mfraezz/osf.io,ZobairAlijan/osf.io,HalcyonChimera/osf.io,ckc6cz/osf.io,sbt9uc/osf.io,laurenrevere/osf.io,chennan47/osf.io,sbt9uc/osf.io,Ghalko/osf.io,petermalcolm/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,mfraezz/osf.io,binoculars/osf.io,haoyuchen1992/osf.io,asanfilippo7/osf.io,samanehsan/osf.io,RomanZWang/osf.io,wearpants/osf.io,abought/osf.io,GageGa
skins/osf.io,hmoco/osf.io,mluo613/osf.io,brianjgeiger/osf.io,erinspace/osf.io,adlius/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,cwisecarver/osf.io,baylee-d/osf.io,abought/osf.io,kch8qx/osf.io,crcresearch/osf.io,rdhyee/osf.io,kwierman/osf.io,brandonPurvis/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,jnayak1/osf.io,amyshi188/osf.io,KAsante95/osf.io,billyhunt/osf.io,mfraezz/osf.io,mluo613/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,ZobairAlijan/osf.io,doublebits/osf.io,amyshi188/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,petermalcolm/osf.io,caneruguz/osf.io,Nesiehr/osf.io,TomHeatwole/osf.io,sbt9uc/osf.io,cosenal/osf.io,ckc6cz/osf.io,ticklemepierce/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,KAsante95/osf.io,danielneis/osf.io,abought/osf.io,zamattiac/osf.io,acshi/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,doublebits/osf.io,RomanZWang/osf.io,billyhunt/osf.io,caseyrollins/osf.io,felliott/osf.io,caseyrygt/osf.io,cslzchen/osf.io,emetsger/osf.io,wearpants/osf.io,SSJohns/osf.io,kch8qx/osf.io,hmoco/osf.io,leb2dg/osf.io,arpitar/osf.io,saradbowman/osf.io,arpitar/osf.io,mluke93/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,amyshi188/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,sloria/osf.io,felliott/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,icereval/osf.io,ckc6cz/osf.io,danielneis/osf.io,cosenal/osf.io,RomanZWang/osf.io,Ghalko/osf.io,cwisecarver/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,ckc6cz/osf.io,jnayak1/osf.io,leb2dg/osf.io,MerlinZhang/osf.io,njantrania/osf.io,adlius/osf.io,hmoco/osf.io,billyhunt/osf.io,abought/osf.io,KAsante95/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,caseyrygt/osf.io,zachjanicki/osf.io,SSJohns/osf.io,Nesiehr/osf.io,doublebits/osf.io,arpitar/osf.io,asanfilippo7/osf.io,arpitar/osf.io,cslzchen/osf.io,samchrisinger/osf.io,alexschiller/osf.io,petermalcolm/osf.io,chennan47/osf.io,TomBaxter/osf.io,wearpants/osf.io,monikagrabowska/osf.io,acsh
i/osf.io,sloria/osf.io,ticklemepierce/osf.io,rdhyee/osf.io,acshi/osf.io,cosenal/osf.io,erinspace/osf.io,kch8qx/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,erinspace/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,aaxelb/osf.io,jmcarp/osf.io,cwisecarver/osf.io,chennan47/osf.io,mluo613/osf.io,amyshi188/osf.io,alexschiller/osf.io,haoyuchen1992/osf.io,icereval/osf.io,jmcarp/osf.io,kwierman/osf.io,Johnetordoff/osf.io,samanehsan/osf.io,alexschiller/osf.io,caneruguz/osf.io,laurenrevere/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,ZobairAlijan/osf.io,TomHeatwole/osf.io,adlius/osf.io,SSJohns/osf.io,chrisseto/osf.io,caseyrollins/osf.io,brandonPurvis/osf.io,mluke93/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,sloria/osf.io,billyhunt/osf.io,samanehsan/osf.io | ---
+++
@@ -1,13 +1,14 @@
#!/usr/bin/env python
import os
import sys
-from website.app import init_app
if __name__ == "__main__":
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.base.settings')
from django.core.management import execute_from_command_line
+ from website.app import init_app
+
init_app(set_backends=True, routes=False, attach_request_handlers=False)
|
2d90da5171dee8ed143c53d89d7f99bf910f083f | instana/util.py | instana/util.py | import random
import os
import time
import struct
import binascii
import sys
if sys.version_info.major is 2:
string_types = basestring
else:
string_types = str
_rnd = random.Random()
_current_pid = 0
def generate_id():
""" Generate a 64bit signed integer for use as a Span or Trace ID """
global _current_pid
pid = os.getpid()
if (_current_pid != pid):
_current_pid = pid
_rnd.seed(int(1000000 * time.time()) ^ pid)
return _rnd.randint(-9223372036854775808, 9223372036854775807)
def id_to_header(id):
""" Convert a 64bit signed integer to an unsigned base 16 hex string """
if not isinstance(id, int):
return ""
byteString = struct.pack('>q', id)
return binascii.hexlify(byteString).decode('UTF-8').lstrip('0')
def header_to_id(header):
""" Convert an unsigned base 16 hex string into a 64bit signed integer """
if not isinstance(header, string_types):
return 0
# Pad the header to 16 chars
header = header.zfill(16)
r = binascii.unhexlify(header)
return struct.unpack('>q', r)[0]
| import random
import os
import time
import struct
import binascii
import sys
if sys.version_info.major is 2:
string_types = basestring
else:
string_types = str
_rnd = random.Random()
_current_pid = 0
def generate_id():
""" Generate a 64bit signed integer for use as a Span or Trace ID """
global _current_pid
pid = os.getpid()
if (_current_pid != pid):
_current_pid = pid
_rnd.seed(int(1000000 * time.time()) ^ pid)
return _rnd.randint(-9223372036854775808, 9223372036854775807)
def id_to_header(id):
""" Convert a 64bit signed integer to an unsigned base 16 hex string """
if not isinstance(id, int):
return ""
byteString = struct.pack('>q', id)
return str(binascii.hexlify(byteString).decode('UTF-8').lstrip('0'))
def header_to_id(header):
""" Convert an unsigned base 16 hex string into a 64bit signed integer """
if not isinstance(header, string_types):
return 0
# Pad the header to 16 chars
header = header.zfill(16)
r = binascii.unhexlify(header)
return struct.unpack('>q', r)[0]
| Make sure only strings are returned for HTTP headers | Make sure only strings are returned for HTTP headers
| Python | mit | instana/python-sensor,instana/python-sensor | ---
+++
@@ -32,7 +32,7 @@
return ""
byteString = struct.pack('>q', id)
- return binascii.hexlify(byteString).decode('UTF-8').lstrip('0')
+ return str(binascii.hexlify(byteString).decode('UTF-8').lstrip('0'))
def header_to_id(header): |
2bd14f768ce7d82f7ef84d1e67d61afda5044581 | st2common/st2common/constants/logging.py | st2common/st2common/constants/logging.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
__all__ = [
'DEFAULT_LOGGING_CONF_PATH'
]
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH)
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
__all__ = [
'DEFAULT_LOGGING_CONF_PATH'
]
BASE_PATH = os.path.dirname(os.path.abspath((__file__)))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH)
| Use the correct base path. | Use the correct base path.
| Python | apache-2.0 | punalpatel/st2,lakshmi-kannan/st2,Plexxi/st2,jtopjian/st2,Plexxi/st2,grengojbo/st2,emedvedev/st2,punalpatel/st2,peak6/st2,dennybaa/st2,alfasin/st2,Plexxi/st2,Itxaka/st2,pinterb/st2,StackStorm/st2,nzlosh/st2,grengojbo/st2,nzlosh/st2,Itxaka/st2,pixelrebel/st2,Plexxi/st2,tonybaloney/st2,peak6/st2,StackStorm/st2,jtopjian/st2,lakshmi-kannan/st2,Itxaka/st2,StackStorm/st2,alfasin/st2,emedvedev/st2,grengojbo/st2,pinterb/st2,pixelrebel/st2,pixelrebel/st2,dennybaa/st2,StackStorm/st2,nzlosh/st2,tonybaloney/st2,armab/st2,lakshmi-kannan/st2,armab/st2,nzlosh/st2,pinterb/st2,jtopjian/st2,punalpatel/st2,dennybaa/st2,peak6/st2,emedvedev/st2,alfasin/st2,tonybaloney/st2,armab/st2 | ---
+++
@@ -19,7 +19,7 @@
'DEFAULT_LOGGING_CONF_PATH'
]
-BASE_PATH = os.path.abspath(os.path.dirname(__file__))
+BASE_PATH = os.path.dirname(os.path.abspath((__file__)))
DEFAULT_LOGGING_CONF_PATH = os.path.join(BASE_PATH, '../conf/base.logging.conf')
DEFAULT_LOGGING_CONF_PATH = os.path.abspath(DEFAULT_LOGGING_CONF_PATH) |
0a5e0935782bdd4c8669a39566d619aa4816ab60 | custom/aaa/utils.py | custom/aaa/utils.py | from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.apps.locations.models import LocationType, SQLLocation
def build_location_filters(location_id):
try:
location = SQLLocation.objects.get(location_id=location_id)
except SQLLocation.DoesNotExist:
return {'state_id': 'ALL'}
location_ancestors = location.get_ancestors(include_self=True)
filters = {
"{}_id".format(ancestor.location_type.code): ancestor.location_id
for ancestor in location_ancestors
}
location_type = location.location_type
child_location_type = LocationType.objects.filter(domain=location_type.domain, parent_type=location_type)
filters["{}_id".format(child_location_type.code)] = 'All'
return filters
| from __future__ import absolute_import
from __future__ import unicode_literals
from django.db import connections
from corehq.apps.locations.models import LocationType, SQLLocation
from custom.aaa.models import AggAwc, AggVillage, CcsRecord, Child, Woman
def build_location_filters(location_id):
try:
location = SQLLocation.objects.get(location_id=location_id)
except SQLLocation.DoesNotExist:
return {'state_id': 'ALL'}
location_ancestors = location.get_ancestors(include_self=True)
filters = {
"{}_id".format(ancestor.location_type.code): ancestor.location_id
for ancestor in location_ancestors
}
location_type = location.location_type
child_location_type = LocationType.objects.filter(domain=location_type.domain, parent_type=location_type)
filters["{}_id".format(child_location_type.code)] = 'All'
return filters
def explain_aggregation_queries(domain, window_start, window_end):
queries = {}
for cls in (AggAwc, AggVillage, CcsRecord, Child, Woman):
for agg_query in cls.aggregation_queries:
explanation = _explain_query(cls, agg_query, domain, window_start, window_end)
queries[explanation[0]] = explanation[1]
return queries
def _explain_query(cls, method, domain, window_start, window_end):
agg_query, agg_params = method(domain, window_start, window_end)
with connections['aaa-data'].cursor() as cursor:
cursor.execute('explain ' + agg_query, agg_params)
return cls.__name__ + method.__name__, cursor.fetchall()
| Create easy explanations for aggregation queries | Create easy explanations for aggregation queries
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | ---
+++
@@ -1,7 +1,10 @@
from __future__ import absolute_import
from __future__ import unicode_literals
+from django.db import connections
+
from corehq.apps.locations.models import LocationType, SQLLocation
+from custom.aaa.models import AggAwc, AggVillage, CcsRecord, Child, Woman
def build_location_filters(location_id):
@@ -22,3 +25,20 @@
filters["{}_id".format(child_location_type.code)] = 'All'
return filters
+
+
+def explain_aggregation_queries(domain, window_start, window_end):
+ queries = {}
+ for cls in (AggAwc, AggVillage, CcsRecord, Child, Woman):
+ for agg_query in cls.aggregation_queries:
+ explanation = _explain_query(cls, agg_query, domain, window_start, window_end)
+ queries[explanation[0]] = explanation[1]
+
+ return queries
+
+
+def _explain_query(cls, method, domain, window_start, window_end):
+ agg_query, agg_params = method(domain, window_start, window_end)
+ with connections['aaa-data'].cursor() as cursor:
+ cursor.execute('explain ' + agg_query, agg_params)
+ return cls.__name__ + method.__name__, cursor.fetchall() |
ec771b7186065443e282be84fbeda5897caba913 | buildbot_travis/steps/base.py | buildbot_travis/steps/base.py | from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
config = TravisYml()
struct = self.build.getProperty(".travis.yml", None)
if struct:
config.parse(struct)
defer.returnValue(config)
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
self.build.setProperty(".travis.yml", config.config, ".VCS")
defer.returnValue(config)
| from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
defer.returnValue(config)
| Revert "Save .travis.yml into build properties" | Revert "Save .travis.yml into build properties"
The data is > 1024 so no dice.
This reverts commit 10960fd1465afb8de92e8fd35b1affca4f950e27.
| Python | unknown | tardyp/buildbot_travis,tardyp/buildbot_travis,isotoma/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis,buildbot/buildbot_travis,buildbot/buildbot_travis,tardyp/buildbot_travis,isotoma/buildbot_travis | ---
+++
@@ -14,13 +14,6 @@
@defer.inlineCallbacks
def getStepConfig(self):
- config = TravisYml()
-
- struct = self.build.getProperty(".travis.yml", None)
- if struct:
- config.parse(struct)
- defer.returnValue(config)
-
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
@@ -32,7 +25,5 @@
config = TravisYml()
config.parse(log.getText())
- self.build.setProperty(".travis.yml", config.config, ".VCS")
-
defer.returnValue(config)
|
864dd5866110ae00248a316cdd62af3241eef47b | runserver.py | runserver.py | # Copyright (c) 2015-2016 Anish Athalye (me@anishathalye.com)
#
# This software is released under AGPLv3. See the included LICENSE.txt for
# details.
if __name__ == '__main__':
from gavel import app
from gavel.settings import PORT
import os
if os.environ.get('DEBUG', False):
app.debug = True
port = PORT
app.run(host='0.0.0.0', port=port)
| # Copyright (c) 2015-2016 Anish Athalye (me@anishathalye.com)
#
# This software is released under AGPLv3. See the included LICENSE.txt for
# details.
if __name__ == '__main__':
from gavel import app
from gavel.settings import PORT
import os
extra_files = []
if os.environ.get('DEBUG', False):
app.debug = True
extra_files.append('./config.yaml')
app.run(
host='0.0.0.0',
port=PORT,
extra_files=extra_files
)
| Make app reload on changing config file | Make app reload on changing config file
| Python | agpl-3.0 | atagh/gavel-clone,anishathalye/gavel,anishathalye/gavel,anishathalye/gavel,atagh/gavel-clone | ---
+++
@@ -8,7 +8,13 @@
from gavel.settings import PORT
import os
+ extra_files = []
if os.environ.get('DEBUG', False):
app.debug = True
- port = PORT
- app.run(host='0.0.0.0', port=port)
+ extra_files.append('./config.yaml')
+
+ app.run(
+ host='0.0.0.0',
+ port=PORT,
+ extra_files=extra_files
+ ) |
4069a7017d0bbda2aa4d436741619304df3f654f | flaskiwsapp/snippets/customApi.py | flaskiwsapp/snippets/customApi.py | '''
Created on Sep 16, 2016
@author: rtorres
'''
from flask_restful import Api
from flask_jwt import JWTError
from flask import jsonify
import collections
class CustomApi(Api):
"""A simple class to keep the default Errors behaviour."""
def handle_error(self, e):
if isinstance(e, JWTError):
return jsonify(
collections.OrderedDict([
('status_code', e.status_code),
('error', e.error),
('description', e.description),
])
), e.status_code, e.headers
elif isinstance(e, Exception):
return jsonify(
collections.OrderedDict([
('status_code', HTTP_500),
('error', str(type(e))),
('description', e.args[0]),
])
), HTTP_500_INTERNAL_SERVER_ERROR
return super(CustomApi, self).handle_error(e)
| '''
Created on Sep 16, 2016
@author: rtorres
'''
from flask_restful import Api
from flask_jwt import JWTError
from flask import jsonify
import collections
from flask_api.status import HTTP_501_NOT_IMPLEMENTED
DUMMY_ERROR_CODE = '1000'
class CustomApi(Api):
"""A simple class to keep the default Errors behaviour."""
def handle_error(self, e):
response_dict = {'data': {}}
error = {}
if isinstance(e, JWTError):
error.update({'status': e.status_code})
error.update({'title': e.error})
error.update({'detail': e.description})
error.update({'code': DUMMY_ERROR_CODE})
response_dict['data'] = error
return jsonify(response_dict), e.status_code, e.headers
elif isinstance(e, Exception):
error.update({'status': HTTP_501_NOT_IMPLEMENTED})
error.update({'title': str(type(e))})
error.update({'detail': e.args[0]})
error.update({'code': DUMMY_ERROR_CODE})
response_dict['data'] = error
return jsonify(response_dict), HTTP_501_NOT_IMPLEMENTED, None
return super(CustomApi, self).handle_error(e)
| Customize handle error to JsonApi standard | Customize handle error to JsonApi standard | Python | mit | rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel | ---
+++
@@ -7,27 +7,29 @@
from flask_jwt import JWTError
from flask import jsonify
import collections
+from flask_api.status import HTTP_501_NOT_IMPLEMENTED
+DUMMY_ERROR_CODE = '1000'
class CustomApi(Api):
"""A simple class to keep the default Errors behaviour."""
def handle_error(self, e):
+ response_dict = {'data': {}}
+ error = {}
if isinstance(e, JWTError):
- return jsonify(
- collections.OrderedDict([
- ('status_code', e.status_code),
- ('error', e.error),
- ('description', e.description),
- ])
- ), e.status_code, e.headers
+ error.update({'status': e.status_code})
+ error.update({'title': e.error})
+ error.update({'detail': e.description})
+ error.update({'code': DUMMY_ERROR_CODE})
+ response_dict['data'] = error
+ return jsonify(response_dict), e.status_code, e.headers
elif isinstance(e, Exception):
- return jsonify(
- collections.OrderedDict([
- ('status_code', HTTP_500),
- ('error', str(type(e))),
- ('description', e.args[0]),
- ])
- ), HTTP_500_INTERNAL_SERVER_ERROR
+ error.update({'status': HTTP_501_NOT_IMPLEMENTED})
+ error.update({'title': str(type(e))})
+ error.update({'detail': e.args[0]})
+ error.update({'code': DUMMY_ERROR_CODE})
+ response_dict['data'] = error
+ return jsonify(response_dict), HTTP_501_NOT_IMPLEMENTED, None
return super(CustomApi, self).handle_error(e) |
f8d43af9b2772f642fddc21f941e1c8da635bcaa | sudoku.py | sudoku.py | import os
import pickle as pck
import numpy as np
from pprint import pprint
import sys
from scripts.sudokuExtractor import Extractor
from scripts.train import NeuralNetwork
from scripts.sudoku_str import SudokuStr
class Sudoku(object):
def __init__(self, name):
image_path = self.getImagePath(name)
cells = Extractor(image_path).cells
neuralnetpath = os.getcwd() + '/networks/net'
sizes, biases, wts = pck.load(open(neuralnetpath, 'r'))
net = NeuralNetwork(customValues=(sizes, biases, wts))
self.res = [[None for _ in range(9)] for _ in range(9)]
for i, row in enumerate(cells):
for j, cell in enumerate(row):
vector = np.reshape(cell, (784, 1))
x = net.feedforward(vector)
x[0] = 0
s = sum(x)
if list(x[np.argmax(x)])[0] / s > 0.8:
self.res[i][j] = str(np.argmax(x))
else:
self.res[i][j] = ' '
s = SudokuStr(self.res)
print(s)
print('')
print(s.solve())
def getImagePath(self, name):
return os.path.abspath(name)
Sudoku(sys.argv[1])
| import os
import pickle as pck
import numpy as np
from pprint import pprint
import sys
from scripts.sudokuExtractor import Extractor
from scripts.train import NeuralNetwork
from scripts.sudoku_str import SudokuStr
class Sudoku(object):
def __init__(self, name):
image_path = self.getImagePath(name)
cells = Extractor(image_path).cells
neuralnetpath = os.getcwd() + '/networks/net'
sizes, biases, wts = pck.load(open(neuralnetpath, 'r'))
net = NeuralNetwork(customValues=(sizes, biases, wts))
self.res = [[None for _ in range(9)] for _ in range(9)]
for i, row in enumerate(cells):
for j, cell in enumerate(row):
vector = np.reshape(cell, (784, 1))
x = net.feedforward(vector)
x[0] = 0
s = sum(x)
if list(x[np.argmax(x)])[0] / s > 0.8:
self.res[i][j] = str(np.argmax(x))
else:
self.res[i][j] = ' '
s = SudokuStr(self.res)
print(s)
print('')
if sudopy.parse_grid(str(s)):
print(s.solve())
else:
print('No solution found. Please rescan the puzzle.')
def getImagePath(self, name):
return os.path.abspath(name)
Sudoku(sys.argv[1])
| Test the puzzle before solving | Test the puzzle before solving
Norvig's code makes this easier than I thought! | Python | mit | prajwalkr/SnapSudoku,ymittal/SnapSudoku,ymittal/SnapSudoku | ---
+++
@@ -32,7 +32,10 @@
s = SudokuStr(self.res)
print(s)
print('')
- print(s.solve())
+ if sudopy.parse_grid(str(s)):
+ print(s.solve())
+ else:
+ print('No solution found. Please rescan the puzzle.')
def getImagePath(self, name):
return os.path.abspath(name) |
a8b606fc5b95bc3728082cee4341ca8075bab965 | python/lumidatumclient/classes.py | python/lumidatumclient/classes.py | import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
selected_model_id = str(model_if) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
return self.getRecommendations(self, parameters, model_id)
| import os
import requests
class LumidatumClient(object):
def __init__(self, authentication_token, model_id=None, host_address='https://www.lumidatum.com'):
self.authentication_token = authentication_token
self.model_id = str(model_id)
self.host_address = host_address
def getRecommendations(self, parameters, model_id=None):
"""
Get recommendations for a model specified by model_id.
Returns a list of id/score pairs in descending order from the highest score.
"""
selected_model_id = str(model_id) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
headers = {
'Authorization': self.authentication_token,
'content-type': 'application/json',
}
response = requests.post(
os.path.join(self.host_address, 'api/predict', selected_model_id),
parameters,
headers=headers
)
return response.json()
def getRecommendationDescriptions(self, parameters, model_id=None):
"""
Get human readable recommendations.
"""
parameters['human_readable'] = True
return self.getRecommendations(self, parameters, model_id)
| Fix for os.path.join with model_id, was breaking on non-string model_id values. | Fix for os.path.join with model_id, was breaking on non-string model_id values.
| Python | mit | daws/lumidatumclients,Lumidatum/lumidatumclients,Lumidatum/lumidatumclients,Lumidatum/lumidatumclients | ---
+++
@@ -16,7 +16,7 @@
Returns a list of id/score pairs in descending order from the highest score.
"""
- selected_model_id = str(model_if) if model_id else self.model_id
+ selected_model_id = str(model_id) if model_id else self.model_id
if selected_model_id is None:
raise ValueError('model_id must be specified either at initialization of LumidatumClient or in client method call.')
|
952e681fe6aaf5fa20b2c3a83d3097f87286e98b | wagtail/search/backends/database/sqlite/utils.py | wagtail/search/backends/database/sqlite/utils.py | import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
| import sqlite3
from django.db import OperationalError
def fts5_available():
# based on https://stackoverflow.com/a/36656216/1853523
if sqlite3.sqlite_version_info < (3, 19, 0):
# Prior to version 3.19, SQLite doesn't support FTS5 queries with
# column filters ('{column_1 column_2} : query'), which the sqlite
# fulltext backend needs
return False
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
except sqlite3.OperationalError:
return False
finally:
tmp_db.close()
return True
def fts_table_exists():
from wagtail.search.models import SQLiteFTSIndexEntry
try:
# ignore result of query; we are only interested in the query failing,
# not the presence of index entries
SQLiteFTSIndexEntry.objects.exists()
except OperationalError:
return False
return True
| Fix Sqlite FTS5 compatibility check | Fix Sqlite FTS5 compatibility check
As per https://github.com/wagtail/wagtail/issues/7798#issuecomment-1021544265 - the direct query against the sqlite3 library will fail with sqlite3.OperationalError, not django.db.OperationalError.
| Python | bsd-3-clause | wagtail/wagtail,jnns/wagtail,wagtail/wagtail,zerolab/wagtail,wagtail/wagtail,rsalmaso/wagtail,thenewguy/wagtail,zerolab/wagtail,rsalmaso/wagtail,rsalmaso/wagtail,mixxorz/wagtail,jnns/wagtail,zerolab/wagtail,thenewguy/wagtail,jnns/wagtail,mixxorz/wagtail,jnns/wagtail,rsalmaso/wagtail,wagtail/wagtail,mixxorz/wagtail,thenewguy/wagtail,zerolab/wagtail,mixxorz/wagtail,zerolab/wagtail,wagtail/wagtail,mixxorz/wagtail,thenewguy/wagtail,thenewguy/wagtail,rsalmaso/wagtail | ---
+++
@@ -14,7 +14,7 @@
tmp_db = sqlite3.connect(':memory:')
try:
tmp_db.execute('CREATE VIRTUAL TABLE fts5test USING fts5 (data);')
- except OperationalError:
+ except sqlite3.OperationalError:
return False
finally:
tmp_db.close() |
d4dc22be443fb73157d542f86dbee5d89ac5a713 | imagersite/imager_images/tests.py | imagersite/imager_images/tests.py | from django.test import TestCase
# Create your tests here.
| from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Photo
# Create your tests here.
| Add imports to imager_images test | Add imports to imager_images test
| Python | mit | jesseklein406/django-imager,jesseklein406/django-imager,jesseklein406/django-imager | ---
+++
@@ -1,3 +1,10 @@
+from __future__ import unicode_literals
+from django.contrib.auth.models import User
from django.test import TestCase
+import factory
+from faker import Faker
+
+from imager_profile.models import ImagerProfile
+from .models import Album, Photo
# Create your tests here. |
c22bb1da7e9a6c0f2fdaddfd3cf6b549de0ad9cb | Scapy/ip_forward.py | Scapy/ip_forward.py | #!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7b:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
# sprintf("{Raw:%Raw.load%\n}")
print str(packet) + "\n"
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
#sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
| #!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7b:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
# shows what the packet contains
packet.show()
# TODO: create condition to check/filter the 'dport' packet tcp argument for 'iscsi_target'
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
| Add packet show command and TODO. | Add packet show command and TODO.
| Python | mit | Illinois-tech-ITM/BSMP-2016-ISCSI-Packet-Injection,illinoistech-itm/pykkon,illinoistech-itm/pykkon,Illinois-tech-ITM/BSMP-2016-ISCSI-Packet-Injection,illinoistech-itm/pykkon,Illinois-tech-ITM/BSMP-2016-ISCSI-Packet-Injection | ---
+++
@@ -21,10 +21,10 @@
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
- # sprintf("{Raw:%Raw.load%\n}")
- print str(packet) + "\n"
+ # shows what the packet contains
+ packet.show()
+ # TODO: create condition to check/filter the 'dport' packet tcp argument for 'iscsi_target'
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
- #printing redirected packets load
- #sprintf("{Raw:%Raw.load%\n}")
+
sniff(prn=handle_packet, filter=filter, iface=iface, store=0) |
3289027d2cc5b07a83dca422bfc14114854618f8 | kazoo/__init__.py | kazoo/__init__.py | import os
from kazoo.zkclient import ZooKeeperClient
__all__ = ['ZooKeeperClient']
# ZK C client likes to spew log info to STDERR. disable that unless an
# env is present.
def disable_zookeeper_log():
import zookeeper
zookeeper.set_log_stream(open('/dev/null'))
if not "KAZOO_LOG_ENABLED" in os.environ:
disable_zookeeper_log()
def patch_extras():
# workaround for http://code.google.com/p/gevent/issues/detail?id=112
# gevent isn't patching threading._sleep which causes problems
# for Condition objects
from gevent import sleep
import threading
threading._sleep = sleep
if "KAZOO_TEST_GEVENT_PATCH" in os.environ:
from gevent import monkey; monkey.patch_all()
patch_extras()
| import os
from kazoo.zkclient import ZooKeeperClient
from kazoo.client import KazooClient
__all__ = ['ZooKeeperClient', 'KazooClient']
# ZK C client likes to spew log info to STDERR. disable that unless an
# env is present.
def disable_zookeeper_log():
import zookeeper
zookeeper.set_log_stream(open('/dev/null'))
if not "KAZOO_LOG_ENABLED" in os.environ:
disable_zookeeper_log()
def patch_extras():
# workaround for http://code.google.com/p/gevent/issues/detail?id=112
# gevent isn't patching threading._sleep which causes problems
# for Condition objects
from gevent import sleep
import threading
threading._sleep = sleep
if "KAZOO_TEST_GEVENT_PATCH" in os.environ:
from gevent import monkey; monkey.patch_all()
patch_extras()
| Add KazooClient to top-level module | Add KazooClient to top-level module | Python | apache-2.0 | nimbusproject/kazoo | ---
+++
@@ -1,8 +1,9 @@
import os
from kazoo.zkclient import ZooKeeperClient
+from kazoo.client import KazooClient
-__all__ = ['ZooKeeperClient']
+__all__ = ['ZooKeeperClient', 'KazooClient']
# ZK C client likes to spew log info to STDERR. disable that unless an |
f55cc84fa738d5fe2c7d9d75d05c6a74a1e0571c | calibre_books/calibre/search_indexes.py | calibre_books/calibre/search_indexes.py | from haystack import indexes
from unidecode import unidecode
from .models import Book
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=False)
genres = indexes.MultiValueField(null=True)
def get_model(self):
return Book
def index_queryset(self, using=None):
return self.get_model().objects.prefetch_related(
'authors', 'comments', 'identifiers', 'languages')
def prepare_genres(self, obj):
return ','.join(obj.genres) or None
def prepare(self, obj):
self.prepared_data = super(BookIndex, self).prepare(obj)
text = [obj.title, obj.isbn, obj.uuid]
if obj.series:
text.extend([obj.series])
authors = [author.name for author in obj.authors.all()]
authors.extend(map(unidecode, authors))
text.extend(set(authors))
text.extend(obj.tags.all())
text.extend(obj.publishers.all())
text.extend(['lang:%s' % l.lang_code for l in obj.languages.all()])
self.prepared_data['text'] = u' '.join(map(unicode, text))
return self.prepared_data
| from haystack import indexes
from unidecode import unidecode
from .models import Book
class BookIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=False)
genres = indexes.MultiValueField(null=True)
def get_model(self):
return Book
def index_queryset(self, using=None):
return self.get_model().objects.prefetch_related(
'authors', 'comments', 'identifiers', 'languages')
def prepare_genres(self, obj):
return ','.join(obj.genres) or None
def prepare(self, obj):
self.prepared_data = super(BookIndex, self).prepare(obj)
text = [obj.title, obj.isbn, obj.uuid]
if obj.series:
text.extend([obj.series])
authors = [author.name for author in obj.authors.all()]
authors.extend(map(unidecode, authors))
text.extend(set(authors))
text.extend(obj.tags.all())
text.extend(obj.publishers.all())
text.extend(['lang:%s' % l.lang_code for l in obj.languages.all()])
text.extend(['publisher:%s' % p.name for p in obj.publishers.all()])
self.prepared_data['text'] = u' '.join(map(unicode, text))
return self.prepared_data
| Add ability to explicitly search by publisher | Add ability to explicitly search by publisher
| Python | bsd-2-clause | bogdal/calibre-books,bogdal/calibre-books | ---
+++
@@ -29,5 +29,6 @@
text.extend(obj.tags.all())
text.extend(obj.publishers.all())
text.extend(['lang:%s' % l.lang_code for l in obj.languages.all()])
+ text.extend(['publisher:%s' % p.name for p in obj.publishers.all()])
self.prepared_data['text'] = u' '.join(map(unicode, text))
return self.prepared_data |
c0c98cb88ac22aee0f7e630fc70a91d5b03faee0 | api/api.py | api/api.py | from django.db.models import Q
from django_filters.rest_framework import DjangoFilterBackend, FilterSet, CharFilter
from rest_framework import routers, viewsets
from vehicles.models import Vehicle, Livery, VehicleType
from .serializers import VehicleSerializer, LiverySerializer, VehicleTypeSerializer
class VehicleFilter(FilterSet):
search = CharFilter(method='search_filter', label='Search')
def search_filter(self, queryset, name, value):
value = value.upper()
return queryset.filter(
Q(reg=value) | Q(fleet_code=value)
)
class Meta:
model = Vehicle
fields = ['operator', 'vehicle_type', 'livery', 'withdrawn', 'reg', 'fleet_code']
class VehicleViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Vehicle.objects.select_related('operator', 'vehicle_type', 'livery').order_by('id')
serializer_class = VehicleSerializer
filter_backends = [DjangoFilterBackend]
filterset_class = VehicleFilter
class LiveryViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Livery.objects.all()
serializer_class = LiverySerializer
class VehicleTypeViewSet(viewsets.ReadOnlyModelViewSet):
queryset = VehicleType.objects.all()
serializer_class = VehicleTypeSerializer
router = routers.DefaultRouter()
router.register('vehicles', VehicleViewSet)
router.register('liveries', LiveryViewSet)
router.register('vehicletypes', VehicleTypeViewSet)
| from django.db.models import Q
from django_filters.rest_framework import DjangoFilterBackend, FilterSet, CharFilter
from rest_framework import routers, viewsets
from vehicles.models import Vehicle, Livery, VehicleType
from .serializers import VehicleSerializer, LiverySerializer, VehicleTypeSerializer
class VehicleFilter(FilterSet):
search = CharFilter(method='search_filter', label='Search')
def search_filter(self, queryset, name, value):
value = value.upper()
return queryset.filter(
Q(reg=value) | Q(fleet_code=value)
)
class Meta:
model = Vehicle
fields = ['id', 'operator', 'vehicle_type', 'livery', 'withdrawn', 'reg', 'fleet_code']
class VehicleViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Vehicle.objects.select_related('operator', 'vehicle_type', 'livery').order_by('id')
serializer_class = VehicleSerializer
filter_backends = [DjangoFilterBackend]
filterset_class = VehicleFilter
class LiveryViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Livery.objects.all()
serializer_class = LiverySerializer
class VehicleTypeViewSet(viewsets.ReadOnlyModelViewSet):
queryset = VehicleType.objects.all()
serializer_class = VehicleTypeSerializer
router = routers.DefaultRouter()
router.register('vehicles', VehicleViewSet)
router.register('liveries', LiveryViewSet)
router.register('vehicletypes', VehicleTypeViewSet)
| Add ID filter to Vehicle API | Add ID filter to Vehicle API
Add an extra filter to the Vehicle API of ID. This has advantages of:
- The ability to look up the vehicle when the ID is the only thing known
- The ability to update data based on the previously saved ID so a lookup is not required again | Python | mpl-2.0 | jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk | ---
+++
@@ -16,7 +16,7 @@
class Meta:
model = Vehicle
- fields = ['operator', 'vehicle_type', 'livery', 'withdrawn', 'reg', 'fleet_code']
+ fields = ['id', 'operator', 'vehicle_type', 'livery', 'withdrawn', 'reg', 'fleet_code']
class VehicleViewSet(viewsets.ReadOnlyModelViewSet): |
b8666e3a2e2c4ee17bfbfa8d17e4625b84c79040 | app/PRESUBMIT.py | app/PRESUBMIT.py | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
| #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
| Make all changes to app/ run on all trybot platforms, not just the big three. Anyone who's changing a header here may break the chromeos build. | Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@51000 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | gavinp/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,ropik/chromium,adobe/chromium,ropik/chromium,adobe/chromium,adobe/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,adobe/chromium,ropik/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,ropik/chromium,adobe/chromium,ropik/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,ropik/chromium,yitian134/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,yitian134/chromium,adobe/chromium,yitian134/chromium,ropik/chromium | ---
+++
@@ -23,3 +23,6 @@
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
+
+def GetPreferredTrySlaves():
+ return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac'] |
c5b01a233ae2dc52b2acedb7e1648a892a2be021 | project4/step_1.py | project4/step_1.py | #!/usr/bin/env python
# `json` is a module that helps us use the JSON data format.
import json
# `requests` is a module for interacting with the Internet
import requests
def main():
url = 'https://www.govtrack.us/api/v2/bill?congress=112&order_by=-current_status_date'
# Read the `requests` documentation for information. I promise it
# isn't that scary.
# http://docs.python-requests.org/en/latest/user/quickstart/#json-response-content
# Request the data
r = requests.get(url)
# Since we know our data will be JSON, let's automatically convert
# it to a Python dict.
data = r.json()
# `json.dumps()` is a way to print a Python dict in a more
# human-readable way.
print json.dumps(data, indent=4)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# `json` is a module that helps us use the JSON data format.
import json
# `requests` is a module for interacting with the Internet
import requests
def main():
url = 'https://www.govtrack.us/api/v2/bill?congress=112&order_by=-current_status_date'
# Read the `requests` documentation for information. I promise it
# isn't that scary.
# http://docs.python-requests.org/en/latest/user/quickstart/#json-response-content
# Request the data
r = requests.get(url)
# Since we know our data will be JSON, let's automatically convert
# it to a Python dict.
data = r.json()
# If the network is down, we can use a local version of this
# file.
#with open('bills.json', 'r') as f:
# data = json.load(f)
# `json.dumps()` is a way to print a Python dict in a more
# human-readable way.
print json.dumps(data, indent=4)
if __name__ == '__main__':
main()
| Add backup code in case network is down | Add backup code in case network is down
| Python | mit | tommeagher/pycar14,rnagle/pycar,ireapps/pycar,tommeagher/pycar14 | ---
+++
@@ -20,6 +20,11 @@
# it to a Python dict.
data = r.json()
+ # If the network is down, we can use a local version of this
+ # file.
+ #with open('bills.json', 'r') as f:
+ # data = json.load(f)
+
# `json.dumps()` is a way to print a Python dict in a more
# human-readable way.
print json.dumps(data, indent=4) |
069f0024a7de3399333dac2d6b5e4cdab28e81b6 | cryptography/bindings/openssl/bignum.py | cryptography/bindings/openssl/bignum.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
INCLUDES = """
#include <openssl/bn.h>
"""
TYPES = """
typedef ... BIGNUM;
typedef ... BN_ULONG;
// Possibly belongs in an asn1.py
typedef ... ASN1_INTEGER;
"""
FUNCTIONS = """
BIGNUM *BN_new();
void BN_free(BIGNUM *);
int BN_set_word(BIGNUM *, BN_ULONG);
char *BN_bn2hex(const BIGNUM *);
int BN_hex2bn(BIGNUM **, const char *);
"""
MACROS = """
ASN1_INTEGER *BN_to_ASN1_INTEGER(BIGNUM *, ASN1_INTEGER *);
"""
| # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
INCLUDES = """
#include <openssl/bn.h>
"""
TYPES = """
typedef ... BIGNUM;
typedef ... BN_ULONG;
"""
FUNCTIONS = """
BIGNUM *BN_new();
void BN_free(BIGNUM *);
int BN_set_word(BIGNUM *, BN_ULONG);
char *BN_bn2hex(const BIGNUM *);
int BN_hex2bn(BIGNUM **, const char *);
"""
MACROS = """
"""
| Remove this, it properly belongs to ASN1, and that's for a seperate PR | Remove this, it properly belongs to ASN1, and that's for a seperate PR
| Python | bsd-3-clause | glyph/cryptography,dstufft/cryptography,Ayrx/cryptography,sholsapp/cryptography,skeuomorf/cryptography,kimvais/cryptography,Ayrx/cryptography,sholsapp/cryptography,skeuomorf/cryptography,sholsapp/cryptography,Lukasa/cryptography,dstufft/cryptography,Lukasa/cryptography,kimvais/cryptography,Lukasa/cryptography,Hasimir/cryptography,bwhmather/cryptography,sholsapp/cryptography,bwhmather/cryptography,glyph/cryptography,bwhmather/cryptography,Hasimir/cryptography,Ayrx/cryptography,Hasimir/cryptography,bwhmather/cryptography,skeuomorf/cryptography,kimvais/cryptography,Hasimir/cryptography,Ayrx/cryptography,dstufft/cryptography,skeuomorf/cryptography,dstufft/cryptography,dstufft/cryptography,kimvais/cryptography | ---
+++
@@ -18,9 +18,6 @@
TYPES = """
typedef ... BIGNUM;
typedef ... BN_ULONG;
-
-// Possibly belongs in an asn1.py
-typedef ... ASN1_INTEGER;
"""
FUNCTIONS = """
@@ -34,5 +31,4 @@
"""
MACROS = """
-ASN1_INTEGER *BN_to_ASN1_INTEGER(BIGNUM *, ASN1_INTEGER *);
""" |
61fad64a141a63578fb978ed729824b0e5317e3d | modules/syscmd.py | modules/syscmd.py | import urllib.request
import os
## Get HTML for given url
def getHtml( self, url, useragent):
try:
if useragent == True:
user_agent = "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)"
headers = { 'User-Agent' : user_agent }
req = urllib.request.Request(url, None, headers)
else:
req = urllib.request.Request(url, None)
html = urllib.request.urlopen(req).read()
return(html)
except urllib.error.HTTPError as msg:
return(msg)
except:
if self.config["debug"] == "true":
print("Fetching data faile for some reason")
## End
## Check if the city exists in Finland
def checkCity ( self, city ):
try:
line = ""
city = city.title()
with open("modules/data/cities.txt", "r", encoding="UTF-8") as file:
for l in file:
line = l.strip()
if city == line:
return(True)
except IOError as msg:
print(msg)
## End | import urllib.request
import os
import re
## Get HTML for given url
def getHtml( self, url, useragent):
try:
if useragent == True:
user_agent = "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)"
headers = { 'User-Agent' : user_agent }
req = urllib.request.Request(url, None, headers)
else:
req = urllib.request.Request(url, None)
html = urllib.request.urlopen(req, timeout = 10).read()
return(html)
except Exception as e:
if self.config["debug"] == "true":
print(e)
## End
## Check if the city exists in Finland
def checkCity ( city ):
try:
line = ""
city = city.title()
with open("modules/data/cities.txt", "r", encoding="UTF-8") as file:
for l in file:
line = l.strip()
if city == line:
return(True)
except IOError as e:
print(e)
## End
def delHtml( html ):
html = re.sub('<[^<]+?>', '', html)
return(html) | Add timeout for the request and catch more exceptions and print them if debug enabled in the conf | Add timeout for the request and catch more exceptions and print them if debug enabled in the conf
| Python | mit | jasuka/pyBot,jasuka/pyBot | ---
+++
@@ -1,5 +1,6 @@
import urllib.request
import os
+import re
## Get HTML for given url
def getHtml( self, url, useragent):
@@ -11,17 +12,15 @@
else:
req = urllib.request.Request(url, None)
- html = urllib.request.urlopen(req).read()
+ html = urllib.request.urlopen(req, timeout = 10).read()
return(html)
- except urllib.error.HTTPError as msg:
- return(msg)
- except:
+ except Exception as e:
if self.config["debug"] == "true":
- print("Fetching data faile for some reason")
+ print(e)
## End
## Check if the city exists in Finland
-def checkCity ( self, city ):
+def checkCity ( city ):
try:
line = ""
@@ -31,6 +30,10 @@
line = l.strip()
if city == line:
return(True)
- except IOError as msg:
- print(msg)
+ except IOError as e:
+ print(e)
## End
+
+def delHtml( html ):
+ html = re.sub('<[^<]+?>', '', html)
+ return(html) |
698a997b7eb9a5dd2e10e2e7129414ab0d6c59fe | numba/__init__.py | numba/__init__.py | import sys
import logging
logging.basicConfig(level=logging.DEBUG)
try:
from . import minivect
except ImportError:
print logging.error("Did you forget to update submodule minivect?")
print logging.error("Run 'git submodule init' followed by 'git submodule update'")
raise
import _numba_types
from ._numba_types import *
__all__ = _numba_types.__all__
| import sys
import logging
logging.basicConfig(level=logging.DEBUG,
format="\n\033[1m%(levelname)s -- %(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s")
try:
from . import minivect
except ImportError:
print logging.error("Did you forget to update submodule minivect?")
print logging.error("Run 'git submodule init' followed by 'git submodule update'")
raise
import _numba_types
from ._numba_types import *
__all__ = _numba_types.__all__
| Make logger format easier to read | Make logger format easier to read | Python | bsd-2-clause | gdementen/numba,gmarkall/numba,shiquanwang/numba,jriehl/numba,jriehl/numba,seibert/numba,stefanseefeld/numba,pombredanne/numba,stuartarchibald/numba,sklam/numba,stefanseefeld/numba,numba/numba,numba/numba,gdementen/numba,stefanseefeld/numba,jriehl/numba,pombredanne/numba,stuartarchibald/numba,ssarangi/numba,pitrou/numba,seibert/numba,GaZ3ll3/numba,stonebig/numba,ssarangi/numba,GaZ3ll3/numba,IntelLabs/numba,gdementen/numba,pombredanne/numba,sklam/numba,cpcloud/numba,gmarkall/numba,pitrou/numba,shiquanwang/numba,cpcloud/numba,seibert/numba,pitrou/numba,GaZ3ll3/numba,GaZ3ll3/numba,stonebig/numba,pombredanne/numba,numba/numba,seibert/numba,stonebig/numba,IntelLabs/numba,ssarangi/numba,shiquanwang/numba,cpcloud/numba,sklam/numba,pitrou/numba,cpcloud/numba,IntelLabs/numba,sklam/numba,GaZ3ll3/numba,pitrou/numba,gmarkall/numba,stuartarchibald/numba,stefanseefeld/numba,pombredanne/numba,numba/numba,stonebig/numba,stuartarchibald/numba,IntelLabs/numba,IntelLabs/numba,stonebig/numba,numba/numba,stefanseefeld/numba,jriehl/numba,seibert/numba,cpcloud/numba,gmarkall/numba,gdementen/numba,ssarangi/numba,gmarkall/numba,stuartarchibald/numba,jriehl/numba,sklam/numba,gdementen/numba,ssarangi/numba | ---
+++
@@ -1,7 +1,8 @@
import sys
import logging
-logging.basicConfig(level=logging.DEBUG)
+logging.basicConfig(level=logging.DEBUG,
+ format="\n\033[1m%(levelname)s -- %(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s")
try:
from . import minivect |
9a26460377db4f177014ab8583cec63302a56190 | jp2_online/settings/production.py | jp2_online/settings/production.py | # -*- coding: utf-8 -*-
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['138.197.197.47']
CORS_ORIGIN_WHITELIST = ('138.197.197.47')
STATIC_ROOT = os.path.join(BASE_DIR, "../static/") | # -*- coding: utf-8 -*-
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['138.197.197.47', 'junipero.erikiado.com']
CORS_ORIGIN_WHITELIST = ('138.197.197.47')
STATIC_ROOT = os.path.join(BASE_DIR, "../static/") | Add host for offline tests | Add host for offline tests
| Python | mit | erikiado/jp2_online,erikiado/jp2_online,erikiado/jp2_online | ---
+++
@@ -4,7 +4,7 @@
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
-ALLOWED_HOSTS = ['138.197.197.47']
+ALLOWED_HOSTS = ['138.197.197.47', 'junipero.erikiado.com']
CORS_ORIGIN_WHITELIST = ('138.197.197.47')
|
f3001e7e72f366fde962bbdd52f38a983d9f7026 | routes/__init__.py | routes/__init__.py | from routes.index import index
from routes.project_page import project
from routes.user_overview import user_overview
from routes.project_overview import group_overview, series_overview
from routes.login import login
def setup_routes(app):
"""
Sets up all the routes for the webapp.
:param app:
:return:
"""
app.router.add_get('/', index)
app.router.add_post('/login', login)
app.router.add_get('/user_overview', user_overview)
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
app.router.add_get('/users/{user_name}/{project_name}', project)
| from routes.index import index
from routes.project_page import project
from routes.user_overview import user_overview
from routes.project_overview import group_overview, series_overview
from routes.login import login
def setup_routes(app):
"""
Sets up all the routes for the webapp.
:param app:
:return:
"""
app.router.add_get('/', index)
app.router.add_post('/login', login)
app.router.add_get('/user_overview', user_overview)
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
app.router.add_get('/projects/{project_name}', project)
| Rename route '/users/{user_name}/{project_name}' to '/projects/{project_name}' | Rename route '/users/{user_name}/{project_name}' to '/projects/{project_name}'
| Python | agpl-3.0 | wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp | ---
+++
@@ -18,4 +18,4 @@
app.router.add_get('/projects/', group_overview)
app.router.add_get('/projects/legacy/{group_series}/{group_part}', group_overview)
app.router.add_get('/projects/legacy/{group_series}/', series_overview)
- app.router.add_get('/users/{user_name}/{project_name}', project)
+ app.router.add_get('/projects/{project_name}', project) |
17623451e96030c4604f97560c045079577f5a6d | src/scriptworker/task_process.py | src/scriptworker/task_process.py | #!/usr/bin/env python
"""TaskProcess class to provide helper methods around worker task's process.
Attributes:
log (logging.Logger): the log object for this module
"""
import asyncio
import logging
import os
import signal
from asyncio.subprocess import Process
log = logging.getLogger(__name__)
class TaskProcess:
"""Wraps worker task's process."""
def __init__(self, process: Process):
"""Constructor.
Args:
process (Process): task process
"""
self.process = process
self.stopped_due_to_worker_shutdown = False
async def worker_shutdown_stop(self):
"""Invoke on worker shutdown to stop task process."""
self.stopped_due_to_worker_shutdown = True
await self.stop()
async def stop(self):
"""Stop the current task process.
Starts with SIGTERM, gives the process 1 second to terminate, then kills it
"""
# negate pid so that signals apply to process group
pgid = -self.process.pid
try:
os.kill(pgid, signal.SIGTERM)
await asyncio.sleep(1)
os.kill(pgid, signal.SIGKILL)
except (OSError, ProcessLookupError):
return
| #!/usr/bin/env python
"""TaskProcess class to provide helper methods around worker task's process.
Attributes:
log (logging.Logger): the log object for this module
"""
import asyncio
import logging
import os
import signal
from asyncio.subprocess import Process
log = logging.getLogger(__name__)
class TaskProcess:
"""Wraps worker task's process."""
def __init__(self, process: Process):
"""Constructor.
Args:
process (Process): task process
"""
self.process = process
self.stopped_due_to_worker_shutdown = False
async def worker_shutdown_stop(self) -> None:
"""Invoke on worker shutdown to stop task process."""
self.stopped_due_to_worker_shutdown = True
await self.stop()
async def stop(self):
"""Stop the current task process.
Starts with SIGTERM, gives the process 1 second to terminate, then kills it
"""
# negate pid so that signals apply to process group
pgid = -self.process.pid
try:
os.kill(pgid, signal.SIGTERM)
await asyncio.sleep(1)
os.kill(pgid, signal.SIGKILL)
except (OSError, ProcessLookupError):
return
| Add type annotation to worker_shutdown_stop, which was called in a typed context | Add type annotation to worker_shutdown_stop, which was called in a typed context
| Python | mpl-2.0 | mozilla-releng/scriptworker,mozilla-releng/scriptworker,escapewindow/scriptworker,escapewindow/scriptworker | ---
+++
@@ -27,7 +27,7 @@
self.process = process
self.stopped_due_to_worker_shutdown = False
- async def worker_shutdown_stop(self):
+ async def worker_shutdown_stop(self) -> None:
"""Invoke on worker shutdown to stop task process."""
self.stopped_due_to_worker_shutdown = True
await self.stop() |
bbf48a79539493fcade9b5cdb4b1c637b64961ee | tests/test_optimistic_strategy.py | tests/test_optimistic_strategy.py | from nose.tools import assert_true, assert_false
from imagekit.cachefiles import ImageCacheFile
from mock import Mock
from .utils import create_image
from django.core.files.storage import FileSystemStorage
from imagekit.cachefiles.backends import Simple as SimpleCFBackend
from imagekit.cachefiles.strategies import Optimistic as OptimisticStrategy
class ImageGenerator(object):
def generate(self):
return create_image()
def get_hash(self):
return 'abc123'
def get_image_cache_file():
storage = Mock(FileSystemStorage)
backend = SimpleCFBackend()
strategy = OptimisticStrategy()
generator = ImageGenerator()
return ImageCacheFile(generator, storage=storage,
cachefile_backend=backend,
cachefile_strategy=strategy)
def test_no_io_on_bool():
"""
When checking the truthiness of an ImageCacheFile, the storage shouldn't
peform IO operations.
"""
file = get_image_cache_file()
bool(file)
assert_false(file.storage.exists.called)
assert_false(file.storage.open.called)
| from nose.tools import assert_true, assert_false
from imagekit.cachefiles import ImageCacheFile
from mock import Mock
from .utils import create_image
from django.core.files.storage import FileSystemStorage
from imagekit.cachefiles.backends import Simple as SimpleCFBackend
from imagekit.cachefiles.strategies import Optimistic as OptimisticStrategy
class ImageGenerator(object):
def generate(self):
return create_image()
def get_hash(self):
return 'abc123'
def get_image_cache_file():
storage = Mock(FileSystemStorage)
backend = SimpleCFBackend()
strategy = OptimisticStrategy()
generator = ImageGenerator()
return ImageCacheFile(generator, storage=storage,
cachefile_backend=backend,
cachefile_strategy=strategy)
def test_no_io_on_bool():
"""
When checking the truthiness of an ImageCacheFile, the storage shouldn't
peform IO operations.
"""
file = get_image_cache_file()
bool(file)
assert_false(file.storage.exists.called)
assert_false(file.storage.open.called)
def test_no_io_on_url():
"""
When getting the URL of an ImageCacheFile, the storage shouldn't be
checked.
"""
file = get_image_cache_file()
file.url
assert_false(file.storage.exists.called)
assert_false(file.storage.open.called)
| Test that there isn't IO done when you get a URL | Test that there isn't IO done when you get a URL
| Python | bsd-3-clause | FundedByMe/django-imagekit,tawanda/django-imagekit,FundedByMe/django-imagekit,tawanda/django-imagekit | ---
+++
@@ -35,3 +35,15 @@
bool(file)
assert_false(file.storage.exists.called)
assert_false(file.storage.open.called)
+
+
+def test_no_io_on_url():
+ """
+ When getting the URL of an ImageCacheFile, the storage shouldn't be
+ checked.
+
+ """
+ file = get_image_cache_file()
+ file.url
+ assert_false(file.storage.exists.called)
+ assert_false(file.storage.open.called) |
ef8f869c5a254d2e3d84c3fa8829215da88681b4 | djangocms_export_objects/tests/docs.py | djangocms_export_objects/tests/docs.py | # -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import socket
from sphinx.application import Sphinx
from six import StringIO
from .base import unittest
from .tmpdir import temp_dir
from unittest import skipIf
ROOT_DIR = os.path.dirname(__file__)
DOCS_DIR = os.path.abspath(os.path.join(ROOT_DIR, u'..', u'..', u'docs'))
def has_no_internet():
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('4.4.4.2', 80))
s.send(b"hello")
except socket.error: # no internet
return True
return False
class DocsTestCase(unittest.TestCase):
"""
Test docs building correctly for HTML
"""
@skipIf(has_no_internet(), "No internet")
def test_html(self):
nullout = StringIO()
with temp_dir() as OUT_DIR:
app = Sphinx(
DOCS_DIR,
DOCS_DIR,
OUT_DIR,
OUT_DIR,
"html",
warningiserror=False,
status=nullout,
)
try:
app.build()
except:
print(nullout.getvalue())
raise
| # -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import socket
from sphinx.application import Sphinx
from six import StringIO
from .base import unittest
from .tmpdir import temp_dir
ROOT_DIR = os.path.dirname(__file__)
DOCS_DIR = os.path.abspath(os.path.join(ROOT_DIR, u'..', u'..', u'docs'))
def has_no_internet():
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('4.4.4.2', 80))
s.send(b"hello")
except socket.error: # no internet
return True
return False
class DocsTestCase(unittest.TestCase):
"""
Test docs building correctly for HTML
"""
@unittest.skipIf(has_no_internet(), "No internet")
def test_html(self):
nullout = StringIO()
with temp_dir() as OUT_DIR:
app = Sphinx(
DOCS_DIR,
DOCS_DIR,
OUT_DIR,
OUT_DIR,
"html",
warningiserror=False,
status=nullout,
)
try:
app.build()
except:
print(nullout.getvalue())
raise
| Fix build on python 2.6 | Fix build on python 2.6
| Python | bsd-3-clause | nephila/djangocms-export-objects,nephila/djangocms-export-objects | ---
+++
@@ -8,7 +8,6 @@
from .base import unittest
from .tmpdir import temp_dir
-from unittest import skipIf
ROOT_DIR = os.path.dirname(__file__)
DOCS_DIR = os.path.abspath(os.path.join(ROOT_DIR, u'..', u'..', u'docs'))
@@ -28,7 +27,7 @@
"""
Test docs building correctly for HTML
"""
- @skipIf(has_no_internet(), "No internet")
+ @unittest.skipIf(has_no_internet(), "No internet")
def test_html(self):
nullout = StringIO()
with temp_dir() as OUT_DIR: |
1a2b8b05f17a5974de997f98ed108f0a57dcd1b0 | aldryn_newsblog/__init__.py | aldryn_newsblog/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.6.0'
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__version__ = '0.6.1'
| Bump version to 0.6.1 for PyPI | Bump version to 0.6.1 for PyPI
| Python | bsd-3-clause | czpython/aldryn-newsblog,czpython/aldryn-newsblog,czpython/aldryn-newsblog,czpython/aldryn-newsblog | ---
+++
@@ -2,4 +2,4 @@
from __future__ import unicode_literals
-__version__ = '0.6.0'
+__version__ = '0.6.1' |
34d94f771b61a73ee484fc576f8e5dd2d0b14a0f | softwareindex/handlers/coreapi.py | softwareindex/handlers/coreapi.py | import requests, json, urllib
SEARCH_URL = 'http://core.ac.uk:80/api-v2/articles/search/'
API_KEY = 'FILL THIS IN'
def getCOREMentions(identifier, **kwargs):
"""Return the number of mentions in CORE and a descriptor, as a tuple.
Needs an API key, which can be obtained here: http://core.ac.uk/api-keys/register"""
params = {
'apiKey': API_KEY,
'metadata': 'false',
'pageSize': 100,
'page': 1
}
params.update(kwargs)
response = requests.get(SEARCH_URL + urllib.quote_plus(identifier), params=params)
response.raise_for_status()
results = response.json()
return (len(results['data'] or []),
'mentions in Open Access articles (via http://core.ac.uk/)')
| import requests, json, urllib
SEARCH_URL = 'http://core.kmi.open.ac.uk/api/search/'
API_KEY = 'FILL THIS IN'
def getCOREMentions(identifier, **kwargs):
"""Return the number of mentions in CORE and a descriptor, as a tuple.
Needs an API key, which can be obtained here: http://core.ac.uk/api-keys/register"""
params = {
'api_key': API_KEY,
'format': 'json',
}
params.update(kwargs)
response = requests.get(SEARCH_URL + urllib.quote_plus(identifier), params=params)
response.raise_for_status()
results = response.json()
score = results['ListRecords'][0]['total_hits']
return score, 'mentions in Open Access articles (via http://core.ac.uk/)'
| Switch to using the v1 API to get total hits. | Switch to using the v1 API to get total hits.
| Python | bsd-3-clause | softwaresaved/softwareindex,softwaresaved/softwareindex | ---
+++
@@ -1,6 +1,6 @@
import requests, json, urllib
-SEARCH_URL = 'http://core.ac.uk:80/api-v2/articles/search/'
+SEARCH_URL = 'http://core.kmi.open.ac.uk/api/search/'
API_KEY = 'FILL THIS IN'
def getCOREMentions(identifier, **kwargs):
@@ -8,10 +8,8 @@
Needs an API key, which can be obtained here: http://core.ac.uk/api-keys/register"""
params = {
- 'apiKey': API_KEY,
- 'metadata': 'false',
- 'pageSize': 100,
- 'page': 1
+ 'api_key': API_KEY,
+ 'format': 'json',
}
params.update(kwargs)
@@ -19,6 +17,6 @@
response.raise_for_status()
results = response.json()
+ score = results['ListRecords'][0]['total_hits']
- return (len(results['data'] or []),
- 'mentions in Open Access articles (via http://core.ac.uk/)')
+ return score, 'mentions in Open Access articles (via http://core.ac.uk/)' |
4c04979de66cf5d0858ff00002ef40df196ccd05 | serfnode/build/handler/handler.py | serfnode/build/handler/handler.py | #!/usr/bin/env python
import os
from serf_master import SerfHandlerProxy
from base_handler import BaseHandler
try:
from my_handler import MyHandler
except ImportError:
print "Could not import user's handler."
print "Defaulting to dummy handler."
MyHandler = BaseHandler
if __name__ == '__main__':
handler = SerfHandlerProxy()
role = os.environ.get('ROLE') or 'no_role'
handler.register(role, MyHandler())
handler.run() | #!/usr/bin/env python
import os
from serf_master import SerfHandlerProxy
from base_handler import BaseHandler
try:
from my_handler import MyHandler
except ImportError:
MyHandler = BaseHandler
if __name__ == '__main__':
handler = SerfHandlerProxy()
role = os.environ.get('ROLE') or 'no_role'
handler.register(role, MyHandler())
handler.run() | Remove prints that interfere with json output | Remove prints that interfere with json output | Python | mit | waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode | ---
+++
@@ -6,8 +6,6 @@
try:
from my_handler import MyHandler
except ImportError:
- print "Could not import user's handler."
- print "Defaulting to dummy handler."
MyHandler = BaseHandler
|
ef4f85808c061f81f123fb91b52fd4c8eb3e32b6 | peel/api.py | peel/api.py | from tastypie.resources import ModelResource
from tastypie.authorization import Authorization
from peel.models import Article
class ArticleResource(ModelResource):
def dehydrate_tags(self, bundle):
# Needed to properly serialize tags into a valid JSON list of strings.
return bundle.obj.tags
def build_filters(self, filters=None):
"""
Add support for exclude filtering
See https://github.com/toastdriven/django-tastypie/issues/524
"""
if not filters:
return filters
applicable_filters = {}
# Separate out normal filters and the __ne operations.
# Normal filtering
filter_params = dict([(x, filters[x]) for x in filter(lambda x: not x.endswith('__ne'), filters)])
applicable_filters['filter'] = super(type(self), self).build_filters(filter_params)
# Exclude filtering
exclude_params = dict([(x[:-4], filters[x]) for x in filter(lambda x: x.endswith('__ne'), filters)])
applicable_filters['exclude'] = super(type(self), self).build_filters(exclude_params)
return applicable_filters
def apply_filters(self, request, applicable_filters):
"""
Add support for exclude filtering
See https://github.com/toastdriven/django-tastypie/issues/524
"""
objects = self.get_object_list(request)
# Distinguish between normal filters and exclude filters
f = applicable_filters.get('filter')
if f:
objects = objects.filter(**f)
e = applicable_filters.get('exclude')
if e:
for exclusion_filter, value in e.items():
objects = objects.exclude(**{exclusion_filter: value})
return objects
class Meta:
queryset = Article.objects.all()
ordering = ['created_at', 'updated_at']
filtering = {
'status': ('exact', 'ne'),
'created_at': ('lt', 'gt'),
'updated_at': ('lt', 'gt'),
}
authorization = Authorization()
always_return_data = True
| from tastypie.resources import ModelResource
from tastypie.authorization import Authorization
from peel.models import Article
class ArticleResource(ModelResource):
def dehydrate_tags(self, bundle):
# Needed to properly serialize tags into a valid JSON list of strings.
return bundle.obj.tags
class Meta:
queryset = Article.objects.all()
ordering = ['created_at', 'updated_at']
filtering = {
'status': ('exact', 'in'),
'created_at': ('lt', 'gt'),
'updated_at': ('lt', 'gt'),
}
authorization = Authorization()
always_return_data = True
| Remove exclude filtering support from API | Remove exclude filtering support from API
I'll just use 'in' instead...
Reverts fa099b12f8a4c4d4aa2eb2954c6c315f3a79ea84.
| Python | mit | imiric/peel,imiric/peel,imiric/peel | ---
+++
@@ -9,49 +9,11 @@
# Needed to properly serialize tags into a valid JSON list of strings.
return bundle.obj.tags
- def build_filters(self, filters=None):
- """
- Add support for exclude filtering
- See https://github.com/toastdriven/django-tastypie/issues/524
- """
- if not filters:
- return filters
-
- applicable_filters = {}
-
- # Separate out normal filters and the __ne operations.
- # Normal filtering
- filter_params = dict([(x, filters[x]) for x in filter(lambda x: not x.endswith('__ne'), filters)])
- applicable_filters['filter'] = super(type(self), self).build_filters(filter_params)
-
- # Exclude filtering
- exclude_params = dict([(x[:-4], filters[x]) for x in filter(lambda x: x.endswith('__ne'), filters)])
- applicable_filters['exclude'] = super(type(self), self).build_filters(exclude_params)
-
- return applicable_filters
-
- def apply_filters(self, request, applicable_filters):
- """
- Add support for exclude filtering
- See https://github.com/toastdriven/django-tastypie/issues/524
- """
- objects = self.get_object_list(request)
-
- # Distinguish between normal filters and exclude filters
- f = applicable_filters.get('filter')
- if f:
- objects = objects.filter(**f)
- e = applicable_filters.get('exclude')
- if e:
- for exclusion_filter, value in e.items():
- objects = objects.exclude(**{exclusion_filter: value})
- return objects
-
class Meta:
queryset = Article.objects.all()
ordering = ['created_at', 'updated_at']
filtering = {
- 'status': ('exact', 'ne'),
+ 'status': ('exact', 'in'),
'created_at': ('lt', 'gt'),
'updated_at': ('lt', 'gt'),
} |
be88549f5a2f95090018b2f44bdebb8b270f9997 | bash_kernel/bash_kernel.py | bash_kernel/bash_kernel.py | from __future__ import print_function
from jupyter_kernel import MagicKernel
class BashKernel(MagicKernel):
implementation = 'Bash'
implementation_version = '1.0'
language = 'bash'
language_version = '0.1'
banner = "Bash kernel - interact with a bash prompt"
def get_usage(self):
return "This is the bash kernel."
def do_execute_direct(self, code):
shell_magic = self.line_magics['shell']
resp, error = shell_magic.eval(code.strip())
if error:
self.Error(error)
if resp:
self.Print(resp)
def add_complete(self, matches, token):
shell_magic = self.line_magics['shell']
matches.extend(shell_magic.get_completions(token))
if __name__ == '__main__':
from IPython.kernel.zmq.kernelapp import IPKernelApp
IPKernelApp.launch_instance(kernel_class=BashKernel)
| from __future__ import print_function
from jupyter_kernel import MagicKernel
class BashKernel(MagicKernel):
implementation = 'Bash'
implementation_version = '1.0'
language = 'bash'
language_version = '0.1'
banner = "Bash kernel - interact with a bash prompt"
def get_usage(self):
return "This is the bash kernel."
def do_execute_direct(self, code):
shell_magic = self.line_magics['shell']
resp, error = shell_magic.eval(code.strip())
if error:
self.Error(error)
if resp:
self.Print(resp)
def get_completions(self, token):
shell_magic = self.line_magics['shell']
return shell_magic.get_completions(token)
def get_kernel_help_on(self, expr, level=0):
shell_magic = self.line_magics['shell']
return shell_magic.get_help_on(expr, level)
if __name__ == '__main__':
from IPython.kernel.zmq.kernelapp import IPKernelApp
IPKernelApp.launch_instance(kernel_class=BashKernel)
| Update bash kernel to use new shell api and shell help. | Update bash kernel to use new shell api and shell help.
| Python | bsd-3-clause | Calysto/metakernel | ---
+++
@@ -21,9 +21,13 @@
if resp:
self.Print(resp)
- def add_complete(self, matches, token):
+ def get_completions(self, token):
shell_magic = self.line_magics['shell']
- matches.extend(shell_magic.get_completions(token))
+ return shell_magic.get_completions(token)
+
+ def get_kernel_help_on(self, expr, level=0):
+ shell_magic = self.line_magics['shell']
+ return shell_magic.get_help_on(expr, level)
if __name__ == '__main__':
from IPython.kernel.zmq.kernelapp import IPKernelApp |
54d5a984aeecd9bad501ec484c173f2dc504dfa5 | dict.py | dict.py | #! /usr/bin/env python2
# -*- coding: utf-8 -*-
import os
import sys
import json
import urllib
import datetime
import subprocess
# api key, 1000 times per hour
APIKEY = 'WGCxN9fzvCxPo0nqlzGLCPUc'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
params = urllib.urlencode({'from': 'auto', 'to': 'auto', 'client_id':APIKEY, 'q': word})
f = urllib.urlopen("http://openapi.baidu.com/public/2.0/bmt/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']])
with open(FILENAME, 'a+', 0) as f:
f.write(word + '\n')
if __name__ == '__main__':
main()
| #! /usr/bin/env python2
# -*- coding: utf-8 -*-
import os
import sys
import json
import urllib
import datetime
import subprocess
import random
import md5
# api key, six million per month
APPID = 'You baidu translate appid'
APIKEY = 'You baidu translate apikey'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
salt = random.randint(32768, 65536)
sign = APPID + word + str(salt) + APIKEY
m1 = md5.new()
m1.update(sign)
sign = m1.hexdigest()
params = urllib.urlencode({'q': word, 'from': 'auto', 'to': 'zh', 'appid':APPID, 'salt': salt, 'sign': sign})
f = urllib.urlopen("http://api.fanyi.baidu.com/api/trans/vip/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']])
with open(FILENAME, 'a+', 0) as f:
f.write(word + '\n')
if __name__ == '__main__':
main()
| Migrate to new translate api | Migrate to new translate api
| Python | mit | pidofme/T4LE | ---
+++
@@ -7,16 +7,24 @@
import urllib
import datetime
import subprocess
+import random
+import md5
-# api key, 1000 times per hour
-APIKEY = 'WGCxN9fzvCxPo0nqlzGLCPUc'
+# api key, six million per month
+APPID = 'You baidu translate appid'
+APIKEY = 'You baidu translate apikey'
PATH = '~/vocabulary' # make sure the path exist
FILENAME = os.path.join(os.path.expanduser(PATH), str(datetime.date.today()) + '.txt')
def main():
word = subprocess.check_output('xsel')
- params = urllib.urlencode({'from': 'auto', 'to': 'auto', 'client_id':APIKEY, 'q': word})
- f = urllib.urlopen("http://openapi.baidu.com/public/2.0/bmt/translate?%s", params)
+ salt = random.randint(32768, 65536)
+ sign = APPID + word + str(salt) + APIKEY
+ m1 = md5.new()
+ m1.update(sign)
+ sign = m1.hexdigest()
+ params = urllib.urlencode({'q': word, 'from': 'auto', 'to': 'zh', 'appid':APPID, 'salt': salt, 'sign': sign})
+ f = urllib.urlopen("http://api.fanyi.baidu.com/api/trans/vip/translate?%s", params)
j = json.loads(f.read())
d = dict(j['trans_result'][0])
subprocess.call(['notify-send', word, d['dst']]) |
d2c552b8996ce1ef8a2d5ef64f6a2b60ce306cf3 | setmagic/models.py | setmagic/models.py | from django.db import models
class Setting(models.Model):
name = models.CharField(max_length=40, unique=True)
label = models.CharField(max_length=60)
help_text = models.TextField()
current_value = models.TextField(blank=True, null=True)
class Meta:
app_label = 'setmagic'
def __str__(self):
return u'{name} ({label})'.format(
name=self.name,
label=self.label,
)
| from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Setting(models.Model):
name = models.CharField(max_length=40, unique=True)
label = models.CharField(max_length=60)
help_text = models.TextField()
current_value = models.TextField(blank=True, null=True)
class Meta:
app_label = 'setmagic'
def __str__(self):
return u'{name} ({label})'.format(
name=self.name,
label=self.label,
)
| Add model unicode support for Python 2 | Add model unicode support for Python 2
| Python | mit | 7ws/django-setmagic | ---
+++
@@ -1,6 +1,8 @@
from django.db import models
+from django.utils.encoding import python_2_unicode_compatible
+@python_2_unicode_compatible
class Setting(models.Model):
name = models.CharField(max_length=40, unique=True)
label = models.CharField(max_length=60) |
196162fe0782cb0e5934dd51f96b4f1d05a108ed | tools/bots/functional_testing.py | tools/bots/functional_testing.py | #!/usr/bin/python
# Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""
Buildbot steps for functional testing master and slaves
"""
if __name__ == '__main__':
print "Functional testing placeholder"
| #!/usr/bin/python
# Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""
Buildbot steps for functional testing master and slaves
"""
import os
import re
import shutil
import sys
import bot
import bot_utils
FT_BUILDER = r'ft-slave-(linux|mac)'
EDITOR_LOCATION='/home/chrome-bot/Desktop'
def SrcConfig(name, is_buildbot):
"""Returns info for the current buildbot based on the name of the builder.
- mode: always "release"
- system: always "linux" or "mac"
"""
pattern = re.match(FT_BUILDER, name)
if not pattern:
return None
return bot.BuildInfo('none', 'none', 'release', pattern.group(1))
def Run(args):
print "Running: %s" % ' '.join(args)
sys.stdout.flush()
bot.RunProcess(args)
def FTSteps(config):
revision = int(os.environ['BUILDBOT_GOT_REVISION'])
bot_name, _ = bot.GetBotName()
print bot_name
channel = bot_utils.GetChannelFromName(bot_name)
namer = bot_utils.GCSNamer(channel=channel)
system = config.system
if system == 'mac':
system = 'macos'
editor_path = namer.editor_zipfilepath(revision, system, 'x64')
gsutils = bot_utils.GSUtil()
local_path = os.path.join(EDITOR_LOCATION, 'editor.zip')
if os.path.exists(local_path):
os.remove(local_path)
local_extracted = os.path.join(EDITOR_LOCATION, 'dart')
shutil.rmtree(local_extracted, ignore_errors=True)
gsutils.execute(['cp', editor_path, local_path])
Run(['unzip', local_path, '-d', EDITOR_LOCATION])
if __name__ == '__main__':
bot.RunBot(SrcConfig, FTSteps)
| Add build steps to functional testing annotated steps script | Add build steps to functional testing annotated steps script
R=messick@google.com
Review URL: https://codereview.chromium.org//312503005
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@36878 260f80e4-7a28-3924-810f-c04153c831b5
| Python | bsd-3-clause | dart-lang/sdk,dartino/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk | ---
+++
@@ -8,5 +8,52 @@
Buildbot steps for functional testing master and slaves
"""
+import os
+import re
+import shutil
+import sys
+
+import bot
+import bot_utils
+
+FT_BUILDER = r'ft-slave-(linux|mac)'
+
+EDITOR_LOCATION='/home/chrome-bot/Desktop'
+
+def SrcConfig(name, is_buildbot):
+ """Returns info for the current buildbot based on the name of the builder.
+
+ - mode: always "release"
+ - system: always "linux" or "mac"
+ """
+ pattern = re.match(FT_BUILDER, name)
+ if not pattern:
+ return None
+ return bot.BuildInfo('none', 'none', 'release', pattern.group(1))
+
+def Run(args):
+ print "Running: %s" % ' '.join(args)
+ sys.stdout.flush()
+ bot.RunProcess(args)
+
+def FTSteps(config):
+ revision = int(os.environ['BUILDBOT_GOT_REVISION'])
+ bot_name, _ = bot.GetBotName()
+ print bot_name
+ channel = bot_utils.GetChannelFromName(bot_name)
+ namer = bot_utils.GCSNamer(channel=channel)
+ system = config.system
+ if system == 'mac':
+ system = 'macos'
+ editor_path = namer.editor_zipfilepath(revision, system, 'x64')
+ gsutils = bot_utils.GSUtil()
+ local_path = os.path.join(EDITOR_LOCATION, 'editor.zip')
+ if os.path.exists(local_path):
+ os.remove(local_path)
+ local_extracted = os.path.join(EDITOR_LOCATION, 'dart')
+ shutil.rmtree(local_extracted, ignore_errors=True)
+ gsutils.execute(['cp', editor_path, local_path])
+ Run(['unzip', local_path, '-d', EDITOR_LOCATION])
+
if __name__ == '__main__':
- print "Functional testing placeholder"
+ bot.RunBot(SrcConfig, FTSteps) |
7e080edea2139c5cce907f4d752320943b044ac7 | game.py | game.py |
people = '123456'
room = 'abcdef'
# murder configuration
# who was where
# who is the murderer
# current configuration
# who was where
# player location
| import random
people = '123456'
room = 'abcdef'
# murder configuration
# who was where
# who is the murderer
# current configuration
# who was where
# player location
murder_config_people = list(people)
random.shuffle(murder_config_people)
murder_location = random.choice(room)
murderer = people[room.find(murder_location)]
current_config_people = list(people)
random.shuffle(current_config_people)
current_location = random.choice(room)
print( current_config_people)
print( current_location)
| Add random people and rooms | Add random people and rooms
| Python | mit | tomviner/dojo-adventure-game | ---
+++
@@ -1,3 +1,4 @@
+import random
people = '123456'
room = 'abcdef'
@@ -10,3 +11,15 @@
# who was where
# player location
+murder_config_people = list(people)
+random.shuffle(murder_config_people)
+murder_location = random.choice(room)
+murderer = people[room.find(murder_location)]
+
+
+current_config_people = list(people)
+random.shuffle(current_config_people)
+current_location = random.choice(room)
+
+print( current_config_people)
+print( current_location) |
582c0e432db918237d1dcbcc4034983408766b4f | thinc/layers/featureextractor.py | thinc/layers/featureextractor.py | from typing import List, Union, Callable, Tuple
from ..types import Ints2d, Doc
from ..model import Model
from ..config import registry
InT = List[Doc]
OutT = List[Ints2d]
@registry.layers("FeatureExtractor.v1")
def FeatureExtractor(columns: List[Union[int, str]]) -> Model[InT, OutT]:
return Model("extract_features", forward, attrs={"columns": columns})
def forward(model: Model[InT, OutT], docs, is_train: bool) -> Tuple[OutT, Callable]:
columns = model.attrs["columns"]
features: OutT = []
for doc in docs:
if hasattr(doc, "to_array"):
attrs = doc.to_array(columns)
else:
attrs = doc.doc.to_array(columns)[doc.start : doc.end]
features.append(model.ops.asarray2i(attrs, dtype="uint64"))
backprop: Callable[[OutT], List] = lambda d_features: []
return features, backprop
| from typing import List, Union, Callable, Tuple
from ..types import Ints2d, Doc
from ..model import Model
from ..config import registry
InT = List[Doc]
OutT = List[Ints2d]
@registry.layers("FeatureExtractor.v1")
def FeatureExtractor(columns: List[Union[int, str]]) -> Model[InT, OutT]:
return Model("extract_features", forward, attrs={"columns": columns})
def forward(model: Model[InT, OutT], docs, is_train: bool) -> Tuple[OutT, Callable]:
columns = model.attrs["columns"]
features: OutT = []
for doc in docs:
if hasattr(doc, "to_array"):
attrs = doc.to_array(columns)
else:
attrs = doc.doc.to_array(columns)[doc.start : doc.end]
if attrs.ndim == 1:
attrs = attrs.reshape((attrs.shape[0], 1))
features.append(model.ops.asarray2i(attrs, dtype="uint64"))
backprop: Callable[[OutT], List] = lambda d_features: []
return features, backprop
| Make sure FeatureExtractor returns array2i | Make sure FeatureExtractor returns array2i
| Python | mit | spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc | ---
+++
@@ -22,6 +22,8 @@
attrs = doc.to_array(columns)
else:
attrs = doc.doc.to_array(columns)[doc.start : doc.end]
+ if attrs.ndim == 1:
+ attrs = attrs.reshape((attrs.shape[0], 1))
features.append(model.ops.asarray2i(attrs, dtype="uint64"))
backprop: Callable[[OutT], List] = lambda d_features: [] |
e9ace6b47d2b9a86988903d3572dc2b8074bf78a | home.py | home.py | #!/usr/bin/env python
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return open('index.html').read()
port = os.getenv('VCAP_APP_PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port))
| #!/usr/bin/env python
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return open('index.html').read()
port = os.getenv('PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port))
| Update environment variable name for port to listen on from VCAP_APP_PORT (deprecated) -> PORT | Update environment variable name for port to listen on from VCAP_APP_PORT (deprecated) -> PORT
| Python | apache-2.0 | CenturyLinkCloud/af-python-jumpstart,CenturyLinkCloud/af-python-jumpstart | ---
+++
@@ -9,7 +9,7 @@
def hello():
return open('index.html').read()
-port = os.getenv('VCAP_APP_PORT', '5000')
+port = os.getenv('PORT', '5000')
if __name__ == "__main__":
app.run(host='0.0.0.0', port=int(port)) |
09ae901f6def59a2d44aa994cb545afb559f9eb1 | dodo_commands/system_commands/activate.py | dodo_commands/system_commands/activate.py | # noqa
from dodo_commands.system_commands import DodoCommand
from dodo_commands.dodo_activate import Activator
class Command(DodoCommand): # noqa
help = ""
decorators = []
def add_arguments_imp(self, parser): # noqa
parser.add_argument('project', nargs='?')
group = parser.add_mutually_exclusive_group()
group.add_argument('--latest', action="store_true")
group.add_argument('--create', action="store_true")
def handle_imp(self, project, latest, create, **kwargs): # noqa
Activator().run(project, latest, create)
| # noqa
from dodo_commands.system_commands import DodoCommand, CommandError
from dodo_commands.dodo_activate import Activator
class Command(DodoCommand): # noqa
help = ""
decorators = []
def add_arguments_imp(self, parser): # noqa
parser.add_argument('project', nargs='?')
group = parser.add_mutually_exclusive_group()
group.add_argument('--latest', action="store_true")
group.add_argument('--create', action="store_true")
def handle_imp(self, project, latest, create, **kwargs): # noqa
if not project and not latest:
raise CommandError("No project was specified")
Activator().run(project, latest, create)
| Fix crash when no project is specified | Fix crash when no project is specified
| Python | mit | mnieber/dodo_commands | ---
+++
@@ -1,5 +1,5 @@
# noqa
-from dodo_commands.system_commands import DodoCommand
+from dodo_commands.system_commands import DodoCommand, CommandError
from dodo_commands.dodo_activate import Activator
@@ -15,4 +15,6 @@
group.add_argument('--create', action="store_true")
def handle_imp(self, project, latest, create, **kwargs): # noqa
+ if not project and not latest:
+ raise CommandError("No project was specified")
Activator().run(project, latest, create) |
a10c866db352e19acf8ade1d3e7cedc9a68ce06f | server/settings.py | server/settings.py | import os
DOMAIN = {
"texts": {
"schema": {
"name": {
"type": "string",
"required": True,
"unique": True,
},
"fulltext": {
"type": "string",
"required": True,
},
}
}
}
RESOURCE_METHODS = ["GET", "POST"]
ITEM_METHODS = ["GET"]
DEBUG = os.environ.get("EVE_DEBUG", False)
APP_SECRET_KEY = os.environ["APP_SECRET_KEY"]
OAUTH_CLIENT_ID = (os.environ["OAUTH_CLIENT_ID"])
OAUTH_CLIENT_SECRET = os.environ["OAUTH_CLIENT_SECRET"]
OAUTH_REDIRECT_URI = os.environ["OAUTH_REDIRECT_URI"]
OAUTH_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
OAUTH_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
OAUTH_USER_INFO = 'https://www.googleapis.com/userinfo/v2/me'
OAUTH_SCOPE = 'email'
| import os
DOMAIN = {
"texts": {
"schema": {
"name": {
"type": "string",
"required": True,
"unique": True,
},
"fulltext": {
"type": "string",
"required": True,
},
}
}
}
RESOURCE_METHODS = ["GET", "POST"]
ITEM_METHODS = ["GET"]
DEBUG = os.environ.get("EVE_DEBUG", False)
if DEBUG:
os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
APP_SECRET_KEY = os.environ["APP_SECRET_KEY"]
OAUTH_CLIENT_ID = (os.environ["OAUTH_CLIENT_ID"])
OAUTH_CLIENT_SECRET = os.environ["OAUTH_CLIENT_SECRET"]
OAUTH_REDIRECT_URI = os.environ["OAUTH_REDIRECT_URI"]
OAUTH_AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
OAUTH_TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
OAUTH_USER_INFO = 'https://www.googleapis.com/userinfo/v2/me'
OAUTH_SCOPE = 'email'
| Allow insecure oauth transport in development. | Allow insecure oauth transport in development.
| Python | mit | mattiaslundberg/typer,mattiaslundberg/typer,mattiaslundberg/typer,mattiaslundberg/typer | ---
+++
@@ -21,6 +21,9 @@
DEBUG = os.environ.get("EVE_DEBUG", False)
+if DEBUG:
+ os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
+
APP_SECRET_KEY = os.environ["APP_SECRET_KEY"]
OAUTH_CLIENT_ID = (os.environ["OAUTH_CLIENT_ID"])
OAUTH_CLIENT_SECRET = os.environ["OAUTH_CLIENT_SECRET"] |
01a86c09b768f6cc4e5bf9b389d09512f9e56ceb | sample_agent.py | sample_agent.py | import numpy as np
import matplotlib.pyplot as plt
class Agent(object):
def __init__(self, dim_action):
self.dim_action = dim_action
def act(self, ob, reward, done, vision):
#print("ACT!")
# Get an Observation from the environment.
# Each observation vectors are numpy array.
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
# vision is given as a tensor with size of (3, 64, 64) <-- rgb
# and values are in [0, 255]
if vision is False:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
"""
img = np.ndarray((64,64,3))
for i in range(3):
img[:, :, i] = 255 - vision[i]
plt.imshow(img, origin='lower')
plt.draw()
plt.pause(0.001)
"""
return np.tanh(np.random.randn(self.dim_action)) # random action
| import numpy as np
import matplotlib.pyplot as plt
class Agent(object):
def __init__(self, dim_action):
self.dim_action = dim_action
def act(self, ob, reward, done, vision_on):
#print("ACT!")
# Get an Observation from the environment.
# Each observation vectors are numpy array.
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
# vision is given as a tensor with size of (64*64, 3) = (4096, 3) <-- rgb
# and values are in [0, 255]
if vision_on is False:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
print(vision.shape)
"""
img = np.ndarray((64,64,3))
for i in range(3):
img[:, :, i] = 255 - vision[:, i].reshape((64, 64))
plt.imshow(img, origin='lower')
plt.draw()
plt.pause(0.001)
"""
return np.tanh(np.random.randn(self.dim_action)) # random action
| Update to follow the new observation format (follow the vision input of OpenAI ATARI environment) | Update to follow the new observation format
(follow the vision input of OpenAI ATARI environment)
| Python | mit | travistang/late_fyt,travistang/late_fyt,ugo-nama-kun/gym_torcs,travistang/late_fyt,ugo-nama-kun/gym_torcs,ugo-nama-kun/gym_torcs,travistang/late_fyt,travistang/late_fyt,ugo-nama-kun/gym_torcs,ugo-nama-kun/gym_torcs,travistang/late_fyt,ugo-nama-kun/gym_torcs,travistang/late_fyt,ugo-nama-kun/gym_torcs | ---
+++
@@ -5,7 +5,7 @@
def __init__(self, dim_action):
self.dim_action = dim_action
- def act(self, ob, reward, done, vision):
+ def act(self, ob, reward, done, vision_on):
#print("ACT!")
# Get an Observation from the environment.
@@ -13,9 +13,9 @@
# focus, opponents, track sensors are scaled into [0, 1]. When the agent
# is out of the road, sensor variables return -1/200.
# rpm, wheelSpinVel are raw values and then needed to be preprocessed.
- # vision is given as a tensor with size of (3, 64, 64) <-- rgb
+ # vision is given as a tensor with size of (64*64, 3) = (4096, 3) <-- rgb
# and values are in [0, 255]
- if vision is False:
+ if vision_on is False:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel = ob
else:
focus, speedX, speedY, speedZ, opponents, rpm, track, wheelSpinVel, vision = ob
@@ -23,10 +23,11 @@
""" The code below is for checking the vision input. This is very heavy for real-time Control
So you may need to remove.
"""
+ print(vision.shape)
"""
img = np.ndarray((64,64,3))
for i in range(3):
- img[:, :, i] = 255 - vision[i]
+ img[:, :, i] = 255 - vision[:, i].reshape((64, 64))
plt.imshow(img, origin='lower')
plt.draw() |
d5697d6176dd9a3d54abc13d38f94f1a326eac84 | dog/core/botcollection.py | dog/core/botcollection.py | import discord
def user_to_bot_ratio(guild: discord.Guild):
""" Calculates the user to bot ratio for a guild. """
bots = len(list(filter(lambda u: u.bot, guild.members)))
users = len(list(filter(lambda u: not u.bot, guild.members)))
ratio = bots / users
return ratio
async def is_blacklisted(bot, guild_id: int) -> bool:
""" Returns a bool indicating whether a guild has been blacklisted. """
async with bot.pgpool.acquire() as conn:
blacklisted_record = await conn.fetchrow('SELECT * FROM blacklisted_guilds WHERE guild_id = $1', guild_id)
return blacklisted_record is not None
async def is_bot_collection(bot, guild: discord.Guild):
""" Returns a bool indicating whether a guild is a collection. """
if await is_blacklisted(bot, guild.id):
return True
# keywords in the guild name
if any([keyword in guild.name.lower() for keyword in ('bot collection', 'bot hell')]):
return True
# special guilds that shouldn't be classified as a bot collection
if guild.id in (110373943822540800, 228317351672545290):
return False
# ratio too big!
if user_to_bot_ratio(guild) >= 8:
return True
return False
| import discord
def user_to_bot_ratio(guild: discord.Guild):
bots, users = 0, 0
for member in guild.bots:
if member.bot:
bots += 1
else:
users += 1
return bots / users
async def is_blacklisted(bot, guild_id: int) -> bool:
""" Returns a bool indicating whether a guild has been blacklisted. """
async with bot.pgpool.acquire() as conn:
blacklisted_record = await conn.fetchrow('SELECT * FROM blacklisted_guilds WHERE guild_id = $1', guild_id)
return blacklisted_record is not None
async def is_bot_collection(bot, guild: discord.Guild):
""" Returns a bool indicating whether a guild is a collection. """
if await is_blacklisted(bot, guild.id):
return True
# keywords in the guild name
if any([keyword in guild.name.lower() for keyword in ('bot collection', 'bot hell')]):
return True
# special guilds that shouldn't be classified as a bot collection
if guild.id in (110373943822540800, 228317351672545290):
return False
# ratio too big!
if user_to_bot_ratio(guild) >= 8:
return True
return False
| Use Fuyu's VeryCool™ UTBR impl | Use Fuyu's VeryCool™ UTBR impl
| Python | mit | slice/dogbot,sliceofcode/dogbot,slice/dogbot,sliceofcode/dogbot,slice/dogbot | ---
+++
@@ -2,12 +2,15 @@
def user_to_bot_ratio(guild: discord.Guild):
- """ Calculates the user to bot ratio for a guild. """
- bots = len(list(filter(lambda u: u.bot, guild.members)))
- users = len(list(filter(lambda u: not u.bot, guild.members)))
+ bots, users = 0, 0
+ for member in guild.bots:
+ if member.bot:
+ bots += 1
+ else:
+ users += 1
- ratio = bots / users
- return ratio
+ return bots / users
+
async def is_blacklisted(bot, guild_id: int) -> bool:
""" Returns a bool indicating whether a guild has been blacklisted. """ |
c1b797b74098fd6f7ea480f7f1bf496d5f52bdc7 | signac/__init__.py | signac/__init__.py | # Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
"""The signac framework aids in the management of large and
heterogeneous data spaces.
It provides a simple and robust data model to create a
well-defined indexable storage layout for data and metadata.
This makes it easier to operate on large data spaces,
streamlines post-processing and analysis and makes data
collectively accessible."""
from __future__ import absolute_import
from . import common
from . import contrib
from . import db
from . import gui
from .common import errors
from .contrib import Project
from .contrib import get_project
from .contrib import init_project
from .contrib import fetch
from .contrib import export_one
from .contrib import export
from .contrib import export_to_mirror
from .contrib import export_pymongo
from .contrib import fetch_one # deprecated
from .contrib import filesystems as fs
from .db import get_database
__version__ = '0.5.0'
__all__ = ['__version__', 'common', 'contrib', 'db', 'gui',
'errors',
'Project', 'get_project', 'init_project',
'get_database', 'fetch', 'fetch_one',
'export_one', 'export', 'export_to_mirror',
'export_pymongo', 'fs'
]
| # Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
"""The signac framework aids in the management of large and
heterogeneous data spaces.
It provides a simple and robust data model to create a
well-defined indexable storage layout for data and metadata.
This makes it easier to operate on large data spaces,
streamlines post-processing and analysis and makes data
collectively accessible."""
from __future__ import absolute_import
from . import contrib
from . import db
from . import gui
from .contrib import Project
from .contrib import get_project
from .contrib import init_project
from .contrib import fetch
from .contrib import export_one
from .contrib import export
from .contrib import export_to_mirror
from .contrib import export_pymongo
from .contrib import fetch_one # deprecated
from .contrib import filesystems as fs
from .db import get_database
__version__ = '0.5.0'
__all__ = ['__version__', 'contrib', 'db', 'gui',
'Project', 'get_project', 'init_project',
'get_database', 'fetch', 'fetch_one',
'export_one', 'export', 'export_to_mirror',
'export_pymongo', 'fs'
]
| Remove common and errors from root namespace. | Remove common and errors from root namespace.
| Python | bsd-3-clause | csadorf/signac,csadorf/signac | ---
+++
@@ -11,11 +11,9 @@
collectively accessible."""
from __future__ import absolute_import
-from . import common
from . import contrib
from . import db
from . import gui
-from .common import errors
from .contrib import Project
from .contrib import get_project
from .contrib import init_project
@@ -30,8 +28,7 @@
__version__ = '0.5.0'
-__all__ = ['__version__', 'common', 'contrib', 'db', 'gui',
- 'errors',
+__all__ = ['__version__', 'contrib', 'db', 'gui',
'Project', 'get_project', 'init_project',
'get_database', 'fetch', 'fetch_one',
'export_one', 'export', 'export_to_mirror', |
e99c230f2bf7bdc010552c03ca657adddebaf818 | chessfellows/chess/urls.py | chessfellows/chess/urls.py | from django.conf.urls import patterns, url
from django.contrib import admin
from chess import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^accounts/home/', views.home_page, name='home'),
url(r'^accounts/history/$', views.history_page, name='history'),
url(r'^accounts/profile/$', views.profile_page, name='profile'),
)
| from django.conf.urls import patterns, url
from django.contrib import admin
from chess import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^accounts/home/', views.home_page, name='home'),
url(r'^accounts/history/$', views.history_page, name='history'),
url(r'^accounts/profile/$', views.profile_page, name='profile'),
url(r'^$', views.base, name='base'),
)
| Add url for landing page (/) that links to the base view | Add url for landing page (/) that links to the base view
| Python | mit | EyuelAbebe/gamer,EyuelAbebe/gamer | ---
+++
@@ -8,7 +8,7 @@
url(r'^accounts/home/', views.home_page, name='home'),
url(r'^accounts/history/$', views.history_page, name='history'),
url(r'^accounts/profile/$', views.profile_page, name='profile'),
-
+ url(r'^$', views.base, name='base'),
)
|
bdcef725ae19e8601d1969b61dba53133f5861c0 | grr/server/grr_response_server/server_startup_test.py | grr/server/grr_response_server/server_startup_test.py | #!/usr/bin/env python
from absl.testing import absltest
from grr_response_server import cronjobs
from grr_response_server import server_startup
from grr_response_server.rdfvalues import cronjobs as rdf_cronjobs
from grr.test_lib import test_lib
from grr.test_lib import testing_startup
class CronJobRegistryTest(test_lib.GRRBaseTest):
@classmethod
def setUpClass(cls):
super(CronJobRegistryTest, cls).setUpClass()
testing_startup.TestInit()
with test_lib.ConfigOverrider({"Server.initialized": True}):
server_startup.Init()
# TODO: Remove once metaclass registry madness is resolved.
def testCronJobRegistryInstantiation(self):
for job_cls in cronjobs.CronJobRegistry.CRON_REGISTRY.values():
job = rdf_cronjobs.CronJob(cron_job_id="foobar")
job_run = rdf_cronjobs.CronJobRun(cron_job_id="foobar", status="RUNNING")
job_cls(job_run, job) # Should not fail.
if __name__ == "__main__":
absltest.main()
| #!/usr/bin/env python
from absl.testing import absltest
from grr_response_server import cronjobs
from grr_response_server.rdfvalues import cronjobs as rdf_cronjobs
from grr.test_lib import test_lib
from grr.test_lib import testing_startup
class CronJobRegistryTest(test_lib.GRRBaseTest):
@classmethod
def setUpClass(cls):
super(CronJobRegistryTest, cls).setUpClass()
testing_startup.TestInit()
# TODO: Remove once metaclass registry madness is resolved.
def testCronJobRegistryInstantiation(self):
# We import the `server_startup` module to ensure that all cron jobs classes
# that are really used on the server are imported and populate the registry.
# pylint: disable=unused-variable, g-import-not-at-top
from grr_response_server import server_startup
# pylint: enable=unused-variable, g-import-not-at-top
for job_cls in cronjobs.CronJobRegistry.CRON_REGISTRY.values():
job = rdf_cronjobs.CronJob(cron_job_id="foobar")
job_run = rdf_cronjobs.CronJobRun(cron_job_id="foobar", status="RUNNING")
job_cls(job_run, job) # Should not fail.
if __name__ == "__main__":
absltest.main()
| Fix state leak causing flakiness. | Fix state leak causing flakiness.
| Python | apache-2.0 | google/grr,google/grr,google/grr,google/grr,google/grr,google/grr,google/grr | ---
+++
@@ -2,7 +2,6 @@
from absl.testing import absltest
from grr_response_server import cronjobs
-from grr_response_server import server_startup
from grr_response_server.rdfvalues import cronjobs as rdf_cronjobs
from grr.test_lib import test_lib
from grr.test_lib import testing_startup
@@ -13,13 +12,16 @@
@classmethod
def setUpClass(cls):
super(CronJobRegistryTest, cls).setUpClass()
-
testing_startup.TestInit()
- with test_lib.ConfigOverrider({"Server.initialized": True}):
- server_startup.Init()
# TODO: Remove once metaclass registry madness is resolved.
def testCronJobRegistryInstantiation(self):
+ # We import the `server_startup` module to ensure that all cron jobs classes
+ # that are really used on the server are imported and populate the registry.
+ # pylint: disable=unused-variable, g-import-not-at-top
+ from grr_response_server import server_startup
+ # pylint: enable=unused-variable, g-import-not-at-top
+
for job_cls in cronjobs.CronJobRegistry.CRON_REGISTRY.values():
job = rdf_cronjobs.CronJob(cron_job_id="foobar")
job_run = rdf_cronjobs.CronJobRun(cron_job_id="foobar", status="RUNNING") |
e61385b03663b4b56c929b3ebdb0e0505b8fb2ff | client/raspi_rest_client.py | client/raspi_rest_client.py | #!/bin/python3
""" This script contains functions for the REST client.
Author: Julien Delplanque
"""
import http.client
import json
def get_pacman_pkgs_to_update(ip: str, username: str, passwd: str):
""" Get the list of packages from the REST server hosted by
the raspberry pi.
Keyword arguments:
ip - the ip of the raspberry pi
username - your username
passwd - your password
"""
conn = http.client.HTTPConnection(ip+":5000")
conn.request("GET", "/pkgtoupdate")
response = conn.getresponse()
j = json.loads(response.read().decode("utf-8"))
return j.get("pacman")
def get_sensors_data(ip: str, username: str, passwd: str):
""" Get the list of sensors data from the REST server hosted by
the raspberry pi.
Keyword arguments:
ip - the ip of the raspberry pi
username - your username
passwd - your password
"""
conn = http.client.HTTPConnection(ip+":5000")
conn.request("GET", "/sensors")
response = conn.getresponse()
return json.loads(response.read().decode("utf-8"))
| #!/bin/python3
""" This script contains functions for the REST client.
Author: Julien Delplanque
"""
import http.client
import json
def get_pacman_pkgs_to_update(ip: str, username: str=None, passwd: str=None):
""" Get the list of packages from the REST server hosted by
the raspberry pi.
TODO implement login.
Keyword arguments:
ip - the ip of the raspberry pi
username - your username
passwd - your password
"""
conn = http.client.HTTPConnection(ip+":5000")
conn.request("GET", "/pkgtoupdate")
response = conn.getresponse()
j = json.loads(response.read().decode("utf-8"))
return j.get("pacman")
def get_sensors_data(ip: str, username: str=None, passwd: str=None):
""" Get the list of sensors data from the REST server hosted by
the raspberry pi.
TODO implement login.
Keyword arguments:
ip - the ip of the raspberry pi
username - your username
passwd - your password
"""
conn = http.client.HTTPConnection(ip+":5000")
conn.request("GET", "/sensors")
response = conn.getresponse()
return json.loads(response.read().decode("utf-8"))
| Add default values for username and passwd and add a TODO. | Add default values for username and passwd and add a TODO.
| Python | mit | juliendelplanque/raspirestmonitor | ---
+++
@@ -6,9 +6,11 @@
import http.client
import json
-def get_pacman_pkgs_to_update(ip: str, username: str, passwd: str):
+def get_pacman_pkgs_to_update(ip: str, username: str=None, passwd: str=None):
""" Get the list of packages from the REST server hosted by
the raspberry pi.
+
+ TODO implement login.
Keyword arguments:
ip - the ip of the raspberry pi
@@ -21,10 +23,12 @@
j = json.loads(response.read().decode("utf-8"))
return j.get("pacman")
-def get_sensors_data(ip: str, username: str, passwd: str):
+def get_sensors_data(ip: str, username: str=None, passwd: str=None):
""" Get the list of sensors data from the REST server hosted by
the raspberry pi.
+ TODO implement login.
+
Keyword arguments:
ip - the ip of the raspberry pi
username - your username |
a43634b3c9ec4d47d8ec032e34a197210a6dddb7 | gesture_recognition/gesture_recognizer.py | gesture_recognition/gesture_recognizer.py | """
Main script to execute the gesture recognition software.
"""
# Import native python libraries
import inspect
import os
import sys
from listener import MyListener
from face_detection import face_detector_gui
import time
# Setup environment variables
src_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
# Windows and Linux
arch_dir = '../LeapSDK/lib/x64' if sys.maxsize > 2**32 else '../LeapSDK/lib/x86'
# Mac
# arch_dir = os.path.abspath(os.path.join(src_dir, '../LeapSDK/lib')
sys.path.insert(0, os.path.abspath(os.path.join(src_dir, arch_dir)))
sys.path.insert(0, "../LeapSDK/lib")
# Import LeapSDK
import Leap
def run_step():
face = face_detector_gui.FDetector()
face.run()
print face.face_detected
if face.face_detected:
# Create a sample listener and controller
listener = MyListener()
controller = Leap.Controller()
controller.set_policy_flags(Leap.Controller.POLICY_IMAGES)
# Have the sample listener receive events from the controller
controller.add_listener(listener)
t_end = time.time() + 20
while time.time() < t_end:
pass
return
if __name__ == "__main__":
while True:
run_step()
| """
Main script to execute the gesture recognition software.
"""
# Import native python libraries
import inspect
import os
import sys
from listener import MyListener
from face_detection import face_detector_gui
import time
# Setup environment variables
src_dir = os.path.dirname(inspect.getfile(inspect.currentframe()))
# Windows and Linux
arch_dir = '../LeapSDK/lib/x64' if sys.maxsize > 2**32 else '../LeapSDK/lib/x86'
# Mac
# arch_dir = os.path.abspath(os.path.join(src_dir, '../LeapSDK/lib')
sys.path.insert(0, os.path.abspath(os.path.join(src_dir, arch_dir)))
sys.path.insert(0, "../LeapSDK/lib")
# Import LeapSDK
import Leap
def run_step():
face = face_detector_gui.FDetector()
face.run()
print('Is face detected? ', face.face_detected)
if face.face_detected:
print("Enabled Gesture Recognition")
# Create a sample listener and controller
listener = MyListener()
controller = Leap.Controller()
controller.set_policy_flags(Leap.Controller.POLICY_IMAGES)
# Have the sample listener receive events from the controller
controller.add_listener(listener)
t_end = time.time() + 20
while time.time() < t_end:
pass
print('Disabled Gesture Recognition')
return
if __name__ == "__main__":
while True:
run_step()
| Add some prints to improve verbosity | Add some prints to improve verbosity
| Python | mit | oscarorti/pae-gesture-recognition,oscarorti/pae-gesture-recognition,oscarorti/pae-gesture-recognition,oscarorti/pae-gesture-recognition,oscarorti/pae-gesture-recognition,oscarorti/pae-gesture-recognition,oscarorti/pae-gesture-recognition | ---
+++
@@ -28,8 +28,9 @@
face = face_detector_gui.FDetector()
face.run()
- print face.face_detected
+ print('Is face detected? ', face.face_detected)
if face.face_detected:
+ print("Enabled Gesture Recognition")
# Create a sample listener and controller
listener = MyListener()
controller = Leap.Controller()
@@ -40,6 +41,7 @@
t_end = time.time() + 20
while time.time() < t_end:
pass
+ print('Disabled Gesture Recognition')
return
|
b98dcfbff114b26475c327492e8fcd8fff17c902 | alg_prim_minimum_spanning_tree.py | alg_prim_minimum_spanning_tree.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def prim():
"""Prim's Minimum Spanning Tree in weighted graph."""
pass
def main():
pass
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim():
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
start_vertex = 'a'
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree from {}:'.format(start_vertex))
pass
if __name__ == '__main__':
main()
| Add weighted undirected graph in main() | Add weighted undirected graph in main()
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | ---
+++
@@ -2,13 +2,32 @@
from __future__ import division
from __future__ import print_function
+import numpy as np
+
+from ds_min_priority_queue_tuple import MinPriorityQueue
+
+
def prim():
- """Prim's Minimum Spanning Tree in weighted graph."""
+ """Prim's algorithm for minimum spanning tree in weighted graph.
+
+ Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
+ """
pass
def main():
- pass
+ w_graph_d = {
+ 'a': {'b': 1, 'd': 4, 'e': 3},
+ 'b': {'a': 1, 'd': 4, 'e': 2},
+ 'c': {'e': 4, 'f': 5},
+ 'd': {'a': 4, 'b': 4, 'e': 4},
+ 'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
+ 'f': {'c': 5, 'e': 7}
+ }
+ start_vertex = 'a'
+ print('w_graph_d:\n{}'.format(w_graph_d))
+ print('Prim minimum spanning tree from {}:'.format(start_vertex))
+ pass
if __name__ == '__main__': |
236ceba56d78733af5fa6b77907298bf2a07de58 | fabfile/dbengine.py | fabfile/dbengine.py | ###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import (
pgsql_createuser,
pgsql_dropdb,
pgsql_createdb,
pgsql_dropuser,
)
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
| ###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import (
pgsql_createuser,
pgsql_dropdb,
pgsql_createdb,
pgsql_dropuser,
)
from .django import syncdb, grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
| Fix 'cant import from .django' | Fix 'cant import from .django'
| Python | agpl-3.0 | miing/mci_migo,miing/mci_migo,miing/mci_migo | ---
+++
@@ -21,7 +21,7 @@
pgsql_createdb,
pgsql_dropuser,
)
-from .django import syncdb grantuser
+from .django import syncdb, grantuser
def setup_pgsql_database(): |
44d6af63406e2f825c44238fd5bde0c49dde0620 | nexus/conf.py | nexus/conf.py | from django.conf import settings
MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/')
| from django.conf import settings
MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/')
if getattr(settings, 'NEXUS_USE_DJANGO_MEDIA_URL', False):
MEDIA_PREFIX = getattr(settings, 'MEDIA_URL', MEDIA_PREFIX)
| Add a setting NEXUS_USE_DJANGO_MEDIA_URL to easily use Django's MEDIA_URL for the nexus MEDIA_PREFIX. | Add a setting NEXUS_USE_DJANGO_MEDIA_URL to easily use Django's MEDIA_URL for the nexus MEDIA_PREFIX.
If you want to make custom modifications to the nexus media it makes sense to have it under your own app's media folder and the NEXUS_USE_DJANGO_MEDIA_URL allows the MEDIA_URL to be DRY. This repetition would be a hassle if you have multiple settings files with their own MEDIA_URLs and having to then repeat the NEXUS_MEDIA_PREFIX in each settings file.
| Python | apache-2.0 | Raekkeri/nexus,graingert/nexus,graingert/nexus,disqus/nexus,YPlan/nexus,disqus/nexus,graingert/nexus,YPlan/nexus,brilliant-org/nexus,YPlan/nexus,roverdotcom/nexus,roverdotcom/nexus,disqus/nexus,brilliant-org/nexus,Raekkeri/nexus,blueprinthealth/nexus,roverdotcom/nexus,blueprinthealth/nexus,blueprinthealth/nexus,brilliant-org/nexus | ---
+++
@@ -1,3 +1,6 @@
from django.conf import settings
MEDIA_PREFIX = getattr(settings, 'NEXUS_MEDIA_PREFIX', '/nexus/media/')
+
+if getattr(settings, 'NEXUS_USE_DJANGO_MEDIA_URL', False):
+ MEDIA_PREFIX = getattr(settings, 'MEDIA_URL', MEDIA_PREFIX) |
caefc529d55f2b036f1b39688d8c27b3bd019d69 | cybox/core/event.py | cybox/core/event.py | # Copyright (c) 2013, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import cybox
import cybox.bindings.cybox_core as core_binding
from cybox.common import VocabString, StructuredText, MeasureSource
from cybox.core import Actions, Frequency
class EventType(VocabString):
_XSI_TYPE = 'cyboxVocabs:EventTypeVocab-1.0.1'
class Event(cybox.Entity):
_binding = core_binding
_binding_class = core_binding.EventType
_namespace = 'http://cybox.mitre.org/cybox-2'
id_ = cybox.TypedField("id")
idref = cybox.TypedField("idref")
type_ = cybox.TypedField("Type", EventType)
description = cybox.TypedField("Description", StructuredText)
observation_method = cybox.TypedField("Observation_Method", MeasureSource)
actions = cybox.TypedField("Actions", Actions)
frequency = cybox.TypedField("Frequency", Frequency)
events = cybox.TypedField("Event", multiple=True)
# Allow recursive definition of events
Event.events.type_ = Event
| # Copyright (c) 2013, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import cybox
import cybox.bindings.cybox_core as core_binding
from cybox.common import VocabString, StructuredText, MeasureSource
from cybox.core import Actions, Frequency
class EventType(VocabString):
_XSI_TYPE = 'cyboxVocabs:EventTypeVocab-1.0.1'
class Event(cybox.Entity):
_binding = core_binding
_binding_class = core_binding.EventType
_namespace = 'http://cybox.mitre.org/cybox-2'
id_ = cybox.TypedField("id")
idref = cybox.TypedField("idref")
type_ = cybox.TypedField("Type", EventType)
description = cybox.TypedField("Description", StructuredText)
observation_method = cybox.TypedField("Observation_Method", MeasureSource)
actions = cybox.TypedField("Actions", Actions)
frequency = cybox.TypedField("Frequency", Frequency)
event = cybox.TypedField("Event", multiple=True)
# Allow recursive definition of events
Event.event.type_ = Event
| Fix typo in property name | Fix typo in property name
| Python | bsd-3-clause | CybOXProject/python-cybox | ---
+++
@@ -25,7 +25,7 @@
actions = cybox.TypedField("Actions", Actions)
frequency = cybox.TypedField("Frequency", Frequency)
- events = cybox.TypedField("Event", multiple=True)
+ event = cybox.TypedField("Event", multiple=True)
# Allow recursive definition of events
-Event.events.type_ = Event
+Event.event.type_ = Event |
ad2f413700c2cdf1a50562fb7d2e26e066778ff5 | image_cropping/thumbnail_processors.py | image_cropping/thumbnail_processors.py | import logging
logger = logging.getLogger(__name__)
def crop_corners(image, box=None, **kwargs):
"""
Crop corners to the selection defined by image_cropping
"""
if box and box[0] != '-':
try:
values = [int(x) for x in box.split(',')]
if sum(values) < 0:
return image
width = abs(values[2] - values[0])
height = abs(values[3] - values[1])
if width and height and (width != image.size[0] or height != image.size[1]):
image = image.crop(values)
except (ValueError, IndexError):
# There's garbage in the cropping field, ignore
logger.warning('Unable to parse "box" parameter value "%s". Ignoring.' % box)
return image
| import logging
logger = logging.getLogger(__name__)
def crop_corners(image, box=None, **kwargs):
"""
Crop corners to the selection defined by image_cropping
`box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers.
"""
if isinstance(box, basestring):
if box.startswith('-'):
pass # TBC: what does this indicate? No-op value?
else:
try:
box = map(int, box.split(','))
except (ValueError, IndexError):
# There's garbage in the cropping field, ignore
logger.warning(
'Unable to parse "box" parameter "%s". Ignoring.' % box)
if isinstance(box, (list, tuple)):
if len(box) == 4:
if sum(box) < 0:
pass # TODO: add explanatory comment for this please
else:
width = abs(box[2] - box[0])
height = abs(box[3] - box[1])
if width and height and (width, height) != image.size:
image = image.crop(box)
else:
logger.warning(
'"box" parameter requires four values. Ignoring "%r".' % (box,)
)
return image
| Improve thumbnail processor a little | Improve thumbnail processor a little
| Python | bsd-3-clause | henriquechehad/django-image-cropping,henriquechehad/django-image-cropping,henriquechehad/django-image-cropping,winzard/django-image-cropping,winzard/django-image-cropping,winzard/django-image-cropping | ---
+++
@@ -6,20 +6,33 @@
def crop_corners(image, box=None, **kwargs):
"""
Crop corners to the selection defined by image_cropping
+
+ `box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers.
"""
+ if isinstance(box, basestring):
+ if box.startswith('-'):
+ pass # TBC: what does this indicate? No-op value?
+ else:
+ try:
+ box = map(int, box.split(','))
+ except (ValueError, IndexError):
+ # There's garbage in the cropping field, ignore
+ logger.warning(
+ 'Unable to parse "box" parameter "%s". Ignoring.' % box)
- if box and box[0] != '-':
- try:
- values = [int(x) for x in box.split(',')]
- if sum(values) < 0:
- return image
- width = abs(values[2] - values[0])
- height = abs(values[3] - values[1])
- if width and height and (width != image.size[0] or height != image.size[1]):
- image = image.crop(values)
- except (ValueError, IndexError):
- # There's garbage in the cropping field, ignore
- logger.warning('Unable to parse "box" parameter value "%s". Ignoring.' % box)
+ if isinstance(box, (list, tuple)):
+ if len(box) == 4:
+ if sum(box) < 0:
+ pass # TODO: add explanatory comment for this please
+ else:
+ width = abs(box[2] - box[0])
+ height = abs(box[3] - box[1])
+ if width and height and (width, height) != image.size:
+ image = image.crop(box)
+ else:
+ logger.warning(
+ '"box" parameter requires four values. Ignoring "%r".' % (box,)
+ )
return image
|
c5f9b9bc76f797156b73a2bb26b80ebf23d62fe4 | polyaxon/pipelines/celery_task.py | polyaxon/pipelines/celery_task.py | from pipelines.models import Operation
from polyaxon.celery_api import CeleryTask
class OperationTask(CeleryTask):
"""Base operation celery task with basic logging."""
_operation = None
def run(self, *args, **kwargs):
self._operation = Operation.objects.get(id=kwargs['query_id'])
super(OperationTask, self).run(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Update query status and send email notification to a user"""
super(OperationTask, self).on_failure(exc, task_id, args, kwargs, einfo)
self._operation.on_failure()
def on_retry(self, exc, task_id, args, kwargs, einfo):
super(OperationTask, self).on_retry(exc, task_id, args, kwargs, einfo)
self._operation.on_retry()
def on_success(self, retval, task_id, args, kwargs):
"""Send email notification and a file, if requested to do so by a user"""
super(OperationTask, self).on_success(retval, task_id, args, kwargs)
self._operation.on_success()
| from pipelines.models import Operation
from polyaxon.celery_api import CeleryTask
class OperationTask(CeleryTask):
"""Base operation celery task with basic logging."""
_operation = None
def __call__(self, *args, **kwargs):
self._operation = Operation.objects.get(id=kwargs['query_id'])
self._operation.on_run()
self.max_retries = self._operation.max_retries
self.countdown = self._operation.get_countdown(self.request.retries)
super(OperationTask, self).__call__(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Update query status and send email notification to a user"""
super(OperationTask, self).on_failure(exc, task_id, args, kwargs, einfo)
self._operation.on_failure()
def on_retry(self, exc, task_id, args, kwargs, einfo):
super(OperationTask, self).on_retry(exc, task_id, args, kwargs, einfo)
self._operation.on_retry()
def on_success(self, retval, task_id, args, kwargs):
"""Send email notification and a file, if requested to do so by a user"""
super(OperationTask, self).on_success(retval, task_id, args, kwargs)
self._operation.on_success()
| Update OperationCelery with max_retries and countdown logic | Update OperationCelery with max_retries and countdown logic
| Python | apache-2.0 | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | ---
+++
@@ -6,9 +6,13 @@
"""Base operation celery task with basic logging."""
_operation = None
- def run(self, *args, **kwargs):
+ def __call__(self, *args, **kwargs):
self._operation = Operation.objects.get(id=kwargs['query_id'])
- super(OperationTask, self).run(*args, **kwargs)
+ self._operation.on_run()
+ self.max_retries = self._operation.max_retries
+ self.countdown = self._operation.get_countdown(self.request.retries)
+
+ super(OperationTask, self).__call__(*args, **kwargs)
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""Update query status and send email notification to a user""" |
0476093e01784c86138794261ca2e49a9b2e0cb5 | armstrong/core/arm_wells/admin.py | armstrong/core/arm_wells/admin.py | from django.conf import settings
from django.contrib import admin
from django.contrib.contenttypes import generic
from reversion.admin import VersionAdmin
from . import models
class NodeAdmin(VersionAdmin):
pass
class NodeInline(admin.TabularInline):
model = models.Node
extra = 1
# This is for Grappelli
sortable_field_name = "order"
related_lookup_fields = {
'generic': ['content_type', 'object_id', ]
}
class WellAdmin(VersionAdmin):
list_display = ('type', 'pub_date', 'expires', 'active', )
inlines = [
NodeInline,
]
class WellTypeAdmin(VersionAdmin):
pass
admin.site.register(models.Node, NodeAdmin)
admin.site.register(models.Well, WellAdmin)
admin.site.register(models.WellType, WellTypeAdmin)
| from django.conf import settings
from django.contrib import admin
from django.contrib.contenttypes import generic
from reversion.admin import VersionAdmin
from armstrong.hatband.options import GenericKeyInline
from . import models
class NodeAdmin(VersionAdmin):
pass
class NodeInline(GenericKeyInline):
model = models.Node
extra = 1
# This is for Grappelli
sortable_field_name = "order"
related_lookup_fields = {
'generic': ['content_type', 'object_id', ]
}
class WellAdmin(VersionAdmin):
list_display = ('type', 'pub_date', 'expires', 'active', )
inlines = [
NodeInline,
]
class WellTypeAdmin(VersionAdmin):
pass
admin.site.register(models.Node, NodeAdmin)
admin.site.register(models.Well, WellAdmin)
admin.site.register(models.WellType, WellTypeAdmin)
| Switch out to use the new GenericKeyInline | Switch out to use the new GenericKeyInline
| Python | apache-2.0 | dmclain/armstrong.core.arm_wells,armstrong/armstrong.core.arm_wells,armstrong/armstrong.core.arm_wells,dmclain/armstrong.core.arm_wells,texastribune/armstrong.core.arm_wells,armstrong/armstrong.core.arm_wells,texastribune/armstrong.core.arm_wells | ---
+++
@@ -2,6 +2,8 @@
from django.contrib import admin
from django.contrib.contenttypes import generic
from reversion.admin import VersionAdmin
+
+from armstrong.hatband.options import GenericKeyInline
from . import models
@@ -10,7 +12,7 @@
pass
-class NodeInline(admin.TabularInline):
+class NodeInline(GenericKeyInline):
model = models.Node
extra = 1
|
6fa8603d0abc69539c2c4f8d1205f2ebb47fc017 | tests/core/tools/test_runner/test_yaml_runner.py | tests/core/tools/test_runner/test_yaml_runner.py | from openfisca_core.tools.test_runner import _run_test, _get_tax_benefit_system
from openfisca_core.errors import VariableNotFound
import pytest
class TaxBenefitSystem:
def __init__(self):
self.variables = {}
def get_package_metadata(self):
return {"name": "Test", "version": "Test"}
def apply_reform(self, path):
return Reform(self)
class Reform(TaxBenefitSystem):
def __init__(self, baseline):
self.baseline = baseline
class Simulation:
def __init__(self):
self.tax_benefit_system = TaxBenefitSystem()
self.entities = {}
def get_entity(self, plural = None):
return None
def test_variable_not_found():
test = {"output": {"unknown_variable": 0}}
with pytest.raises(VariableNotFound) as excinfo:
_run_test(Simulation(), test)
assert excinfo.value.variable_name == "unknown_variable"
class reform_ab(Reform):
def apply(self):
self.key = self.__class__.__name__
class reform_ba(Reform):
def apply(self):
self.key = self.__class__.__name__
def test_tax_benefit_systems_with_reform_cache():
baseline = TaxBenefitSystem()
ab_tax_benefit_system = _get_tax_benefit_system(baseline, 'ab', [])
ba_tax_benefit_system = _get_tax_benefit_system(baseline, 'ba', [])
assert ab_tax_benefit_system != ba_tax_benefit_system
| from openfisca_core.tools.test_runner import _run_test, _get_tax_benefit_system
from openfisca_core.errors import VariableNotFound
import pytest
class TaxBenefitSystem:
def __init__(self):
self.variables = {}
def get_package_metadata(self):
return {"name": "Test", "version": "Test"}
def apply_reform(self, path):
return Reform(self)
class Reform(TaxBenefitSystem):
def __init__(self, baseline):
self.baseline = baseline
class Simulation:
def __init__(self):
self.tax_benefit_system = TaxBenefitSystem()
self.entities = {}
def get_entity(self, plural = None):
return None
def test_variable_not_found():
test = {"output": {"unknown_variable": 0}}
with pytest.raises(VariableNotFound) as excinfo:
_run_test(Simulation(), test)
assert excinfo.value.variable_name == "unknown_variable"
def test_tax_benefit_systems_with_reform_cache():
baseline = TaxBenefitSystem()
ab_tax_benefit_system = _get_tax_benefit_system(baseline, 'ab', [])
ba_tax_benefit_system = _get_tax_benefit_system(baseline, 'ba', [])
assert ab_tax_benefit_system != ba_tax_benefit_system
def test_yaml_one_reform():
pass
def test_yaml_reforms_list():
# Test order
pass
| Remove unused code in test | Remove unused code in test
| Python | agpl-3.0 | openfisca/openfisca-core,openfisca/openfisca-core | ---
+++
@@ -37,19 +37,18 @@
assert excinfo.value.variable_name == "unknown_variable"
-class reform_ab(Reform):
- def apply(self):
- self.key = self.__class__.__name__
-
-
-class reform_ba(Reform):
- def apply(self):
- self.key = self.__class__.__name__
-
-
def test_tax_benefit_systems_with_reform_cache():
baseline = TaxBenefitSystem()
ab_tax_benefit_system = _get_tax_benefit_system(baseline, 'ab', [])
ba_tax_benefit_system = _get_tax_benefit_system(baseline, 'ba', [])
assert ab_tax_benefit_system != ba_tax_benefit_system
+
+
+def test_yaml_one_reform():
+ pass
+
+
+def test_yaml_reforms_list():
+ # Test order
+ pass |
8fa346532068aadf510ebcc1ef795527f7b68597 | frigg_worker/api.py | frigg_worker/api.py | # -*- coding: utf-8 -*-
import logging
import socket
import requests
logger = logging.getLogger(__name__)
class APIWrapper(object):
def __init__(self, options):
self.token = options['hq_token']
self.url = options['hq_url']
@property
def headers(self):
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
def get(self, url):
return requests.post(url, headers=self.headers)
def post(self, url, data):
return requests.post(url, data=data, headers=self.headers)
def report_run(self, endpoint, build_id, build):
response = self.post(self.url, data=build)
logger.info('Reported build to hq, hq response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
if response.status_code != 200:
logger.error('Report of build failed, response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
with open('build-{0}-hq-response.html'.format(build_id), 'w') as f:
f.write(response.text)
return response
| # -*- coding: utf-8 -*-
import logging
import socket
import requests
logger = logging.getLogger(__name__)
class APIWrapper(object):
def __init__(self, options):
self.token = options['hq_token']
self.url = options['hq_url']
@property
def headers(self):
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
'x-frigg-worker-token': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
def get(self, url):
return requests.post(url, headers=self.headers)
def post(self, url, data):
return requests.post(url, data=data, headers=self.headers)
def report_run(self, endpoint, build_id, build):
response = self.post(self.url, data=build)
logger.info('Reported build to hq, hq response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
if response.status_code != 200:
logger.error('Report of build failed, response status-code: {0}, data:\n{1}'.format(
response.status_code,
build
))
with open('build-{0}-hq-response.html'.format(build_id), 'w') as f:
f.write(response.text)
return response
| Add x-frigg-worker-token header to hq requests | fix: Add x-frigg-worker-token header to hq requests
This will in time be to remove the FRIGG_WORKER_TOKEN header.
| Python | mit | frigg/frigg-worker | ---
+++
@@ -18,6 +18,7 @@
return {
'content-type': 'application/json',
'FRIGG_WORKER_TOKEN': self.token,
+ 'x-frigg-worker-token': self.token,
'x-frigg-worker-host': socket.getfqdn()
}
|
d5d8a20922e44bb4fb6b905729d13771c6eab592 | babybuddy/settings/development.py | babybuddy/settings/development.py | from .base import *
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
SECRET_KEY = 'CHANGE ME'
DEBUG = True
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# Django Rest Framework
# http://www.django-rest-framework.org/#
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
)
| from .base import *
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
SECRET_KEY = 'CHANGE ME'
DEBUG = True
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
#
# Comment out STATICFILES_STORAGE and uncomment DEBUG = False to test with
# production static files.
# DEBUG = False
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# Django Rest Framework
# http://www.django-rest-framework.org/#
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] = (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
)
| Add note to dev settings re: testing production assets. | Add note to dev settings re: testing production assets.
| Python | bsd-2-clause | cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy | ---
+++
@@ -20,7 +20,11 @@
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
+#
+# Comment out STATICFILES_STORAGE and uncomment DEBUG = False to test with
+# production static files.
+# DEBUG = False
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
|
fce3dd3b08f2ff8500be4d694e9d384bd61b82ab | quickly/families/models.py | quickly/families/models.py | from django.db import models
from quickly.buttons.models import EmergencyButtonClient
class FamilyMember(models.Model):
"""
Model which defines families of the platform with authentication
possibilities and a phone number which can be sent to
emergency services.
"""
phone_number = models.CharField(max_length=15)
email = models.EmailField()
emergency_button_client = models.ForeignKey(EmergencyButtonClient)
| from django.db import models
from quickly.buttons.models import EmergencyButtonClient
class FamilyMember(models.Model):
"""
Model which defines families of the platform with authentication
possibilities and a phone number which can be sent to
emergency services.
"""
phone_number = models.CharField(max_length=15)
email = models.EmailField()
name = models.CharField(max_length=255, blank=True)
emergency_button_client = models.ForeignKey(EmergencyButtonClient)
| Add name to family member | Add name to family member
| Python | mit | wearespindle/quickly.press,wearespindle/quickly.press,wearespindle/quickly.press | ---
+++
@@ -11,4 +11,5 @@
"""
phone_number = models.CharField(max_length=15)
email = models.EmailField()
+ name = models.CharField(max_length=255, blank=True)
emergency_button_client = models.ForeignKey(EmergencyButtonClient) |
250e720550a514457e5f698e80ed89e50abee482 | tbmodels/_kdotp.py | tbmodels/_kdotp.py | import numpy as np
import scipy.linalg as la
from fsc.export import export
from fsc.hdf5_io import subscribe_hdf5, SimpleHDF5Mapping
@export
@subscribe_hdf5('tbmodels.model', check_on_load=False)
class KdotpModel(SimpleHDF5Mapping):
HDF5_ATTRIBUTES = ['taylor_coefficients']
def __init__(self, taylor_coefficients):
self.taylor_coefficients = {
tuple(key): np.array(mat, dtype=complex)
for key, mat in taylor_coefficients.items()
}
def hamilton(self, k):
return sum(sum(kval**p for kval, p in zip(k, pow)) * mat for pow, mat in self.taylor_coefficients.items())
def eigenval(self, k):
return la.eigvalsh(self.hamilton(k))
| import numpy as np
import scipy.linalg as la
from fsc.export import export
from fsc.hdf5_io import subscribe_hdf5, SimpleHDF5Mapping
@export
@subscribe_hdf5('tbmodels.kdotp_model', check_on_load=False)
class KdotpModel(SimpleHDF5Mapping):
HDF5_ATTRIBUTES = ['taylor_coefficients']
def __init__(self, taylor_coefficients):
self.taylor_coefficients = {
tuple(key): np.array(mat, dtype=complex)
for key, mat in taylor_coefficients.items()
}
def hamilton(self, k):
return sum(np.prod(np.array(k)**np.array(pow)) * mat for pow, mat in self.taylor_coefficients.items())
def eigenval(self, k):
return la.eigvalsh(self.hamilton(k))
| Fix computation of Hamiltonian for k.p models. | Fix computation of Hamiltonian for k.p models.
| Python | apache-2.0 | Z2PackDev/TBmodels,Z2PackDev/TBmodels | ---
+++
@@ -6,7 +6,7 @@
@export
-@subscribe_hdf5('tbmodels.model', check_on_load=False)
+@subscribe_hdf5('tbmodels.kdotp_model', check_on_load=False)
class KdotpModel(SimpleHDF5Mapping):
HDF5_ATTRIBUTES = ['taylor_coefficients']
@@ -17,7 +17,7 @@
}
def hamilton(self, k):
- return sum(sum(kval**p for kval, p in zip(k, pow)) * mat for pow, mat in self.taylor_coefficients.items())
+ return sum(np.prod(np.array(k)**np.array(pow)) * mat for pow, mat in self.taylor_coefficients.items())
def eigenval(self, k):
return la.eigvalsh(self.hamilton(k)) |
c0e68d9e4fe18154deb412d5897702603883cc06 | statsd/__init__.py | statsd/__init__.py | try:
from django.conf import settings
except ImportError:
settings = None
from client import StatsClient
__all__ = ['StatsClient', 'statsd', 'VERSION']
VERSION = (0, 1)
if settings:
host = getattr(settings, 'STATSD_HOST', 'localhost')
port = getattr(settings, 'STATSD_PORT', 8125)
statsd = StatsClient(host, port)
| try:
from django.conf import settings
except ImportError:
settings = None
from client import StatsClient
__all__ = ['StatsClient', 'statsd', 'VERSION']
VERSION = (0, 1)
if settings:
try:
host = getattr(settings, 'STATSD_HOST', 'localhost')
port = getattr(settings, 'STATSD_PORT', 8125)
statsd = StatsClient(host, port)
except ImportError:
statsd = None
| Support Django being on the path but unused. | Support Django being on the path but unused.
| Python | mit | Khan/pystatsd,wujuguang/pystatsd,deathowl/pystatsd,lyft/pystatsd,lyft/pystatsd,smarkets/pystatsd,jsocol/pystatsd,Khan/pystatsd | ---
+++
@@ -12,6 +12,9 @@
if settings:
- host = getattr(settings, 'STATSD_HOST', 'localhost')
- port = getattr(settings, 'STATSD_PORT', 8125)
- statsd = StatsClient(host, port)
+ try:
+ host = getattr(settings, 'STATSD_HOST', 'localhost')
+ port = getattr(settings, 'STATSD_PORT', 8125)
+ statsd = StatsClient(host, port)
+ except ImportError:
+ statsd = None |
412084e5cd7d59d48bf889570f168759a5f4b775 | sentry/templatetags/sentry_admin_helpers.py | sentry/templatetags/sentry_admin_helpers.py | """
sentry.templatetags.sentry_admin_helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import template
from sentry.conf import settings
register = template.Library()
@register.filter
def avg_events_per_day(project):
"""
Project is expected to have already been annotated with avg_events_per_n
and n_value properties.
"""
if not project.avg_events_per_n:
per_day = 0
else:
n_per_hour = (60 / settings.MINUTE_NORMALIZATION)
per_day = int(project.avg_events_per_n / project.n_value) - (project.n_value % n_per_hour)
return per_day
| """
sentry.templatetags.sentry_admin_helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import template
from sentry.conf import settings
register = template.Library()
@register.filter
def avg_events_per_day(project):
"""
Project is expected to have already been annotated with avg_events_per_n
and n_value properties.
"""
if not project.avg_events_per_n:
per_day = 0
else:
n_per_hour = (60 / settings.MINUTE_NORMALIZATION)
per_day = int(project.avg_events_per_n / project.n_value) * (project.n_value % n_per_hour)
return per_day
| Correct the math on project events/day | Correct the math on project events/day
| Python | bsd-3-clause | ifduyue/sentry,jean/sentry,chayapan/django-sentry,kevinlondon/sentry,songyi199111/sentry,BuildingLink/sentry,Kryz/sentry,wong2/sentry,drcapulet/sentry,korealerts1/sentry,felixbuenemann/sentry,drcapulet/sentry,Natim/sentry,NickPresta/sentry,ngonzalvez/sentry,looker/sentry,rdio/sentry,kevinlondon/sentry,beeftornado/sentry,boneyao/sentry,daevaorn/sentry,llonchj/sentry,wong2/sentry,songyi199111/sentry,nicholasserra/sentry,fuziontech/sentry,TedaLIEz/sentry,boneyao/sentry,1tush/sentry,kevinastone/sentry,camilonova/sentry,Natim/sentry,ewdurbin/sentry,jokey2k/sentry,pauloschilling/sentry,fotinakis/sentry,beni55/sentry,SilentCircle/sentry,BayanGroup/sentry,gg7/sentry,BayanGroup/sentry,daevaorn/sentry,ifduyue/sentry,gencer/sentry,beeftornado/sentry,felixbuenemann/sentry,BuildingLink/sentry,imankulov/sentry,SilentCircle/sentry,NickPresta/sentry,beni55/sentry,kevinastone/sentry,camilonova/sentry,JTCunning/sentry,vperron/sentry,JamesMura/sentry,JackDanger/sentry,JTCunning/sentry,alexm92/sentry,fotinakis/sentry,mvaled/sentry,JackDanger/sentry,hongliang5623/sentry,JamesMura/sentry,fuziontech/sentry,TedaLIEz/sentry,alex/sentry,zenefits/sentry,BuildingLink/sentry,zenefits/sentry,mvaled/sentry,SilentCircle/sentry,jean/sentry,gencer/sentry,alex/sentry,JackDanger/sentry,1tush/sentry,imankulov/sentry,nicholasserra/sentry,SilentCircle/sentry,alexm92/sentry,JTCunning/sentry,ewdurbin/sentry,hongliang5623/sentry,gencer/sentry,alex/sentry,looker/sentry,JamesMura/sentry,vperron/sentry,1tush/sentry,nicholasserra/sentry,ifduyue/sentry,NickPresta/sentry,chayapan/django-sentry,ngonzalvez/sentry,wujuguang/sentry,rdio/sentry,pauloschilling/sentry,zenefits/sentry,BuildingLink/sentry,BuildingLink/sentry,daevaorn/sentry,gencer/sentry,pauloschilling/sentry,rdio/sentry,ngonzalvez/sentry,camilonova/sentry,rdio/sentry,hongliang5623/sentry,songyi199111/sentry,JamesMura/sentry,jokey2k/sentry,vperron/sentry,wujuguang/sentry,llonchj/sentry,mitsuhiko/sentry,korealerts1/sentry,zenefits
/sentry,fotinakis/sentry,llonchj/sentry,argonemyth/sentry,mvaled/sentry,ifduyue/sentry,drcapulet/sentry,jean/sentry,wujuguang/sentry,boneyao/sentry,jokey2k/sentry,Kryz/sentry,ewdurbin/sentry,JamesMura/sentry,mvaled/sentry,zenefits/sentry,Kryz/sentry,fotinakis/sentry,beni55/sentry,jean/sentry,Natim/sentry,NickPresta/sentry,gg7/sentry,ifduyue/sentry,felixbuenemann/sentry,daevaorn/sentry,chayapan/django-sentry,kevinastone/sentry,mitsuhiko/sentry,BayanGroup/sentry,kevinlondon/sentry,mvaled/sentry,looker/sentry,looker/sentry,alexm92/sentry,gg7/sentry,argonemyth/sentry,korealerts1/sentry,gencer/sentry,TedaLIEz/sentry,wong2/sentry,jean/sentry,argonemyth/sentry,fuziontech/sentry,imankulov/sentry,mvaled/sentry,looker/sentry,beeftornado/sentry | ---
+++
@@ -22,6 +22,6 @@
per_day = 0
else:
n_per_hour = (60 / settings.MINUTE_NORMALIZATION)
- per_day = int(project.avg_events_per_n / project.n_value) - (project.n_value % n_per_hour)
+ per_day = int(project.avg_events_per_n / project.n_value) * (project.n_value % n_per_hour)
return per_day |
427f02c7f6c93e15d219d975d337a97d74a88b42 | convergence-tests/runall.py | convergence-tests/runall.py | import os
import time
import multiprocessing
threads = 4
dev_null = "/dev/null"
input_dir = "./convergence_inputs/"
log_file = dev_null
call = "nice -n 19 ionice -c2 -n7 ../build/main.x "
call_end = " >> " + log_file
syscall_arr = []
input_files = os.listdir(input_dir)
if __name__ == "__main__":
pool = multiprocessing.Pool(processes=threads)
for fname in input_files:
inp_path = input_dir + fname
syscall = call + inp_path + call_end
syscall_arr.append(syscall)
if log_file is not dev_null:
os.remove(log_file)
start_time = time.time()
pool.map(os.system, syscall_arr)
pool.close()
pool.join()
end_time = time.time()
print("Runtime: ", end_time-start_time)
| import os
import time
import multiprocessing
threads = 4
os.environ["OMP_NUM_THREADS"] = "1"
dev_null = "/dev/null"
input_dir = "./convergence_inputs/"
log_file = "log.log"
call = "nice -n 19 ionice -c2 -n7 ../build/main.x "
call_end = " >> " + log_file
syscall_arr = []
input_files = os.listdir(input_dir)
if __name__ == "__main__":
pool = multiprocessing.Pool(processes=threads)
for fname in input_files:
inp_path = input_dir + fname
syscall = call + inp_path + call_end
syscall_arr.append(syscall)
if log_file is not dev_null:
try:
os.remove(log_file)
except:
pass
start_time = time.time()
pool.map(os.system, syscall_arr)
pool.close()
pool.join()
end_time = time.time()
print("Runtime: ", end_time-start_time)
| Update parallel convergence test runs to not spawn OMP threads | Update parallel convergence test runs to not spawn OMP threads | Python | mit | kramer314/1d-vd-test,kramer314/1d-vd-test | ---
+++
@@ -4,10 +4,12 @@
threads = 4
+os.environ["OMP_NUM_THREADS"] = "1"
+
dev_null = "/dev/null"
input_dir = "./convergence_inputs/"
-log_file = dev_null
+log_file = "log.log"
call = "nice -n 19 ionice -c2 -n7 ../build/main.x "
call_end = " >> " + log_file
@@ -25,7 +27,10 @@
syscall_arr.append(syscall)
if log_file is not dev_null:
- os.remove(log_file)
+ try:
+ os.remove(log_file)
+ except:
+ pass
start_time = time.time()
|
01e4b6c3cbd11058e3d60a635048998c24138ddb | instana/__init__.py | instana/__init__.py | from __future__ import absolute_import
import opentracing
from .sensor import Sensor
from .tracer import InstanaTracer
from .options import Options
# Import & initialize instrumentation
from .instrumentation import urllib3
"""
The Instana package has two core components: the sensor and the tracer.
The sensor is individual to each python process and handles process metric
collection and reporting.
The tracer upholds the OpenTracing API and is responsible for reporting
span data to Instana.
"""
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2017 Instana Inc.'
__credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo']
__license__ = 'MIT'
__version__ = '0.7.0'
__maintainer__ = 'Peter Giacomo Lombardo'
__email__ = 'peter.lombardo@instana.com'
# For any given Python process, we only want one sensor as multiple would
# collect/report metrics in duplicate, triplicate etc..
#
# Usage example:
#
# import instana
# instana.global_sensor
#
global_sensor = Sensor(Options())
# The global OpenTracing compatible tracer used internally by
# this package.
#
# Usage example:
#
# import instana
# instana.internal_tracer.start_span(...)
#
internal_tracer = InstanaTracer()
# Set ourselves as the tracer.
opentracing.tracer = internal_tracer
| from __future__ import absolute_import
import os
import opentracing
from .sensor import Sensor
from .tracer import InstanaTracer
from .options import Options
if "INSTANA_DISABLE_AUTO_INSTR" not in os.environ:
# Import & initialize instrumentation
from .instrumentation import urllib3
"""
The Instana package has two core components: the sensor and the tracer.
The sensor is individual to each python process and handles process metric
collection and reporting.
The tracer upholds the OpenTracing API and is responsible for reporting
span data to Instana.
"""
__author__ = 'Instana Inc.'
__copyright__ = 'Copyright 2017 Instana Inc.'
__credits__ = ['Pavlo Baron', 'Peter Giacomo Lombardo']
__license__ = 'MIT'
__version__ = '0.7.0'
__maintainer__ = 'Peter Giacomo Lombardo'
__email__ = 'peter.lombardo@instana.com'
# For any given Python process, we only want one sensor as multiple would
# collect/report metrics in duplicate, triplicate etc..
#
# Usage example:
#
# import instana
# instana.global_sensor
#
global_sensor = Sensor(Options())
# The global OpenTracing compatible tracer used internally by
# this package.
#
# Usage example:
#
# import instana
# instana.internal_tracer.start_span(...)
#
internal_tracer = InstanaTracer()
# Set ourselves as the tracer.
opentracing.tracer = internal_tracer
| Add environment variable to disable automatic instrumentation | Add environment variable to disable automatic instrumentation
| Python | mit | instana/python-sensor,instana/python-sensor | ---
+++
@@ -1,11 +1,13 @@
from __future__ import absolute_import
+import os
import opentracing
from .sensor import Sensor
from .tracer import InstanaTracer
from .options import Options
-# Import & initialize instrumentation
-from .instrumentation import urllib3
+if "INSTANA_DISABLE_AUTO_INSTR" not in os.environ:
+ # Import & initialize instrumentation
+ from .instrumentation import urllib3
"""
The Instana package has two core components: the sensor and the tracer. |
d5b5421c95b1e2feb4646a42b5aca71a2280e30c | tests/dojo_test.py | tests/dojo_test.py | import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3) | import unittest
from src.dojo import Dojo
class TestCreateRoom (unittest.TestCase):
def test_create_room_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
blue_office = my_class_instance.create_room("office", "Blue")
self.assertTrue(blue_office)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 1)
def test_create_rooms_successfully(self):
my_class_instance = Dojo()
initial_room_count = len(my_class_instance.all_rooms)
offices = my_class_instance.create_room("office", "Blue", "Black", "Brown")
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
def test_person_added_to_system(self):
initial_person_count = len(self.dojo.all_people)
person = self.dojo.add_person("Neil", "Armstrong", "Staff")
self.assertTrue(person)
new_person_count = len(self.dojo.all_people)
self.assertEqual(new_person_count - initial_person_count, 1) | Create test to check that a person has been added | Create test to check that a person has been added
| Python | mit | EdwinKato/Space-Allocator,EdwinKato/Space-Allocator | ---
+++
@@ -18,3 +18,10 @@
self.assertTrue(offices)
new_room_count = len(my_class_instance.all_rooms)
self.assertEqual(new_room_count - initial_room_count, 3)
+
+ def test_person_added_to_system(self):
+ initial_person_count = len(self.dojo.all_people)
+ person = self.dojo.add_person("Neil", "Armstrong", "Staff")
+ self.assertTrue(person)
+ new_person_count = len(self.dojo.all_people)
+ self.assertEqual(new_person_count - initial_person_count, 1) |
65c5474936dca27023e45c1644fa2a9492e9a420 | tests/convergence_tests/run_convergence_tests_lspr.py | tests/convergence_tests/run_convergence_tests_lspr.py | import os
import time
import subprocess
import datetime
from check_for_meshes import check_mesh
# tests to run
tests = ['sphere_lspr.py', 'sphere_multiple_lspr.py']
# specify CUDA device to use
CUDA_DEVICE = '0'
ENV = os.environ.copy()
ENV['CUDA_DEVICE'] = CUDA_DEVICE
mesh_file = ''
folder_name = 'lspr_convergence_test_meshes'
rename_folder = 'geometry_lspr'
size = '~3MB'
check_mesh(mesh_file, folder_name, rename_folder, size)
tic = time.time()
for test in tests:
subprocess.call(['python', '{}'.format(test)])
toc = time.time()
print("Total runtime for convergence tests: ")
print(str(datetime.timedelta(seconds=(toc - tic))))
| import os
import time
import subprocess
import datetime
from check_for_meshes import check_mesh
# tests to run
tests = ['sphere_lspr.py', 'sphere_multiple_lspr.py']
# specify CUDA device to use
CUDA_DEVICE = '0'
ENV = os.environ.copy()
ENV['CUDA_DEVICE'] = CUDA_DEVICE
mesh_file = 'https://zenodo.org/record/580786/files/pygbe-lspr_convergence_test_meshes.zip'
folder_name = 'lspr_convergence_test_meshes'
rename_folder = 'geometry_lspr'
size = '~3MB'
check_mesh(mesh_file, folder_name, rename_folder, size)
tic = time.time()
for test in tests:
subprocess.call(['python', '{}'.format(test)])
toc = time.time()
print("Total runtime for convergence tests: ")
print(str(datetime.timedelta(seconds=(toc - tic))))
| Add path to convergence test lspr zip file | Add path to convergence test lspr zip file
| Python | bsd-3-clause | barbagroup/pygbe,barbagroup/pygbe,barbagroup/pygbe | ---
+++
@@ -14,7 +14,7 @@
ENV = os.environ.copy()
ENV['CUDA_DEVICE'] = CUDA_DEVICE
-mesh_file = ''
+mesh_file = 'https://zenodo.org/record/580786/files/pygbe-lspr_convergence_test_meshes.zip'
folder_name = 'lspr_convergence_test_meshes'
rename_folder = 'geometry_lspr'
size = '~3MB' |
c614d5e636ad22c470ac730ceb292a10c9537c6b | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | Create documentation of DataSource Settings | : Create documentation of DataSource Settings
Task-Url: | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | ---
+++
@@ -16,6 +16,6 @@
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
-for db in dbs.splitlines():
+for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) |
591aaa938c22b797fc6bbeb5050ec489cc966a47 | tests/run_tests.py | tests/run_tests.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from unittest import main
from test_core import *
from test_lazy import *
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Hack to allow us to run tests before installing.
import sys, os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..')))
from unittest import main
from test_core import *
from test_lazy import *
if __name__ == '__main__':
main()
| Make running unit tests more friendly | Make running unit tests more friendly
| Python | mit | CovenantEyes/py_stringlike | ---
+++
@@ -1,5 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
+
+# Hack to allow us to run tests before installing.
+import sys, os
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..')))
from unittest import main
|
80b148f31b616ee85e63ddc524a6dd2910b5a467 | tests/test_auth.py | tests/test_auth.py | import random
import unittest
from six.moves import input
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
| import random
import unittest
from .config import *
from tweepy import API, OAuthHandler
class TweepyAuthTests(unittest.TestCase):
def testoauth(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
# test getting access token
auth_url = auth.get_authorization_url()
print('Please authorize: ' + auth_url)
verifier = input('PIN: ').strip()
self.assertTrue(len(verifier) > 0)
access_token = auth.get_access_token(verifier)
self.assertTrue(access_token is not None)
# build api object test using oauth
api = API(auth)
s = api.update_status('test %i' % random.randint(0, 1000))
api.destroy_status(s.id)
def testaccesstype(self):
auth = OAuthHandler(oauth_consumer_key, oauth_consumer_secret)
auth_url = auth.get_authorization_url(access_type='read')
print('Please open: ' + auth_url)
answer = input('Did Twitter only request read permissions? (y/n) ')
self.assertEqual('y', answer.lower())
| Remove input import from six.moves | Remove input import from six.moves
| Python | mit | svven/tweepy,tweepy/tweepy | ---
+++
@@ -1,7 +1,5 @@
import random
import unittest
-
-from six.moves import input
from .config import *
from tweepy import API, OAuthHandler |
d2d03e89b0c89bc78c4087b5ad6a4f543301f927 | Bindings/Python/tests/test_component_interface.py | Bindings/Python/tests/test_component_interface.py | import os
import unittest
import opensim as osim
test_dir = os.path.join(os.path.dirname(os.path.abspath(osim.__file__)),
'tests')
# Silence warning messages if mesh (.vtp) files cannot be found.
osim.Model.setDebugLevel(0)
class TestComponentInterface(unittest.TestCase):
def test_printComponentsMatching(self):
model = osim.Model(os.path.join(test_dir,
"gait10dof18musc_subject01.osim"))
model.finalizeFromProperties();
num_matches = model.printComponentsMatching("_r")
self.assertEquals(num_matches, 98)
def test_attachGeometry_memory_management(self):
model = osim.Model()
model.getGround().attachGeometry(osim.Sphere(1.5))
| import os
import unittest
import opensim as osim
test_dir = os.path.join(os.path.dirname(os.path.abspath(osim.__file__)),
'tests')
# Silence warning messages if mesh (.vtp) files cannot be found.
osim.Model.setDebugLevel(0)
class TestComponentInterface(unittest.TestCase):
    """Smoke tests for the OpenSim component-interface bindings."""
    def test_printComponentsMatching(self):
        """The sample gait model contains 126 components matching '_r'."""
        model = osim.Model(os.path.join(test_dir,
                                        "gait10dof18musc_subject01.osim"))
        model.finalizeFromProperties()
        num_matches = model.printComponentsMatching("_r")
        # assertEqual, not the deprecated assertEquals alias (the alias
        # was removed in Python 3.12).
        self.assertEqual(num_matches, 126)
    def test_attachGeometry_memory_management(self):
        """Attaching geometry owned by a temporary must not crash."""
        model = osim.Model()
        model.getGround().attachGeometry(osim.Sphere(1.5))
| Update the number of components listed in the model. | Update the number of components listed in the model.
| Python | apache-2.0 | opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core | ---
+++
@@ -15,7 +15,7 @@
"gait10dof18musc_subject01.osim"))
model.finalizeFromProperties();
num_matches = model.printComponentsMatching("_r")
- self.assertEquals(num_matches, 98)
+ self.assertEquals(num_matches, 126)
def test_attachGeometry_memory_management(self):
model = osim.Model()
model.getGround().attachGeometry(osim.Sphere(1.5)) |
b7a0653cdb2c20def38a687963763b75455ebbcb | conftest.py | conftest.py | from __future__ import absolute_import, division, print_function
from dials.conftest import regression_data, run_in_tmpdir
| from __future__ import absolute_import, division, print_function
from dials.conftest import pytest_addoption, regression_data, run_in_tmpdir
| Add --regression command line option | Add --regression command line option
| Python | bsd-3-clause | xia2/i19 | ---
+++
@@ -1,3 +1,3 @@
from __future__ import absolute_import, division, print_function
-from dials.conftest import regression_data, run_in_tmpdir
+from dials.conftest import pytest_addoption, regression_data, run_in_tmpdir |
33fa3d886742b440945fa80eaa5a8da9950f1181 | runtests.py | runtests.py | #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
# Minimal Django configuration for running the package's test suite:
# an in-memory SQLite database and only the apps the tests exercise.
DEFAULT_SETTINGS = dict(
    INSTALLED_APPS=[
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "django.contrib.sites",
        "badgekit_webhooks",
        "badgekit_webhooks.tests"
    ],
    DATABASES={
        "default": {
            "ENGINE": "django.db.backends.sqlite3",
            "NAME": ":memory:",
        }
    },
    SITE_ID=1,
    ROOT_URLCONF="badgekit_webhooks.tests.urls",
    SECRET_KEY="notasecret",  # test-only value, never used in production
)
def runtests(*test_args):
    """Configure a minimal Django environment and run the test suite.

    Exits the process with the number of failures as the status code.
    """
    if not settings.configured:
        settings.configure(**DEFAULT_SETTINGS)

    # Compatibility with Django 1.7's stricter initialization
    if hasattr(django, "setup"):
        django.setup()

    parent = os.path.dirname(os.path.abspath(__file__))
    sys.path.insert(0, parent)

    try:
        # Django < 1.8: old suite-based runner, takes app labels.
        from django.test.simple import DjangoTestSuiteRunner as TestRunner
        if not test_args:
            test_args = ["tests"]
    except ImportError:
        # django.test.simple was removed in Django 1.8; DiscoverRunner
        # expects dotted module labels instead of app labels.
        from django.test.runner import DiscoverRunner as TestRunner
        if not test_args:
            test_args = ["badgekit_webhooks.tests"]

    failures = TestRunner(
        verbosity=1, interactive=True, failfast=False).run_tests(test_args)
    sys.exit(failures)
if __name__ == "__main__":
runtests(*sys.argv[1:])
| #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
# Minimal Django configuration for running the package's test suite:
# an in-memory SQLite database and only the apps the tests exercise.
DEFAULT_SETTINGS = dict(
    INSTALLED_APPS=[
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "django.contrib.sites",
        "badgekit_webhooks",
        "badgekit_webhooks.tests"
    ],
    DATABASES={
        "default": {
            "ENGINE": "django.db.backends.sqlite3",
            "NAME": ":memory:",
        }
    },
    SITE_ID=1,
    ROOT_URLCONF="badgekit_webhooks.tests.urls",
    SECRET_KEY="notasecret",  # test-only value, never used in production
)
def runtests(*test_args):
    """Bootstrap a throwaway Django configuration and execute the test
    suite, exiting with the failure count as the status code."""
    if not settings.configured:
        settings.configure(**DEFAULT_SETTINGS)

    # Django >= 1.7 needs an explicit setup() before models are loaded.
    if hasattr(django, "setup"):
        django.setup()

    repo_root = os.path.dirname(os.path.abspath(__file__))
    sys.path.insert(0, repo_root)

    try:
        # Old suite-based runner (Django < 1.8) takes app labels.
        from django.test.simple import DjangoTestSuiteRunner as TestRunner
    except ImportError:
        # Modern Django: DiscoverRunner wants dotted module labels.
        from django.test.runner import DiscoverRunner as TestRunner
        default_labels = ["badgekit_webhooks.tests"]
    else:
        default_labels = ["tests"]
    if not test_args:
        test_args = default_labels

    runner = TestRunner(verbosity=1, interactive=True, failfast=False)
    sys.exit(runner.run_tests(test_args))
if __name__ == "__main__":
runtests(*sys.argv[1:])
| Fix test runner for development Django | Fix test runner for development Django
| Python | mit | tgs/django-badgekit-webhooks | ---
+++
@@ -35,14 +35,19 @@
if hasattr(django, "setup"):
django.setup()
- if not test_args:
- test_args = ["tests"]
-
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
- from django.test.simple import DjangoTestSuiteRunner
- failures = DjangoTestSuiteRunner(
+ try:
+ from django.test.simple import DjangoTestSuiteRunner as TestRunner
+ if not test_args:
+ test_args = ["tests"]
+ except ImportError:
+ from django.test.runner import DiscoverRunner as TestRunner
+ if not test_args:
+ test_args = ["badgekit_webhooks.tests"]
+
+ failures = TestRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
|
2d908f812a0cfeab18e36733ec3380e507865c20 | tests/test_auth.py | tests/test_auth.py | # -*- coding: utf-8 -*-
from unittest import TestCase
class TestOneAll(TestCase):
    """Placeholder suite verifying the test harness itself runs."""
    def test_whether_test_runs(self):
        # Trivial assertion: passes as long as the runner can import and
        # execute this module.
        self.assertTrue(True)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from unittest import TestCase, main
from pyoneall import OneAll
from pyoneall.classes import BadOneAllCredentials, Connections
class TestOneAll(TestCase):
    """Integration tests that hit the live OneAll API."""

    # Working credentials for the 'python' demo site.
    VALID_CREDENTIALS = {
        'site_name': 'python',
        'public_key': '2d27cffd-1ced-4991-83d1-acce715461e5',
        'private_key': '84d94998-4029-4ac3-be9b-f2825100da6a',
    }

    # Well-formed but unknown key pair.
    INVALID_CREDENTIALS = {
        'site_name': 'python',
        'public_key': '01234567-89ab-cdef-0123-456789abcdef',
        'private_key': '01234567-89ab-cdef-0123-456789abcdef',
    }

    def test_00_whether_test_runs(self):
        # Sanity check that the harness executes this module at all.
        self.assertTrue(True)

    def test_01_users_list(self):
        # Valid credentials should yield a Connections collection.
        client = OneAll(**self.VALID_CREDENTIALS)
        result = client.connections()
        self.assertIsInstance(result, Connections)

    def test_02_bad_credentials(self):
        # Unknown keys must surface as BadOneAllCredentials.
        client = OneAll(**self.INVALID_CREDENTIALS)
        with self.assertRaises(BadOneAllCredentials):
            client.connections()

    def dont_test_03_swapped_credentials(self):
        # Disabled: unclear how this should differ from test_02.
        creds = dict(self.VALID_CREDENTIALS)
        creds['private_key'], creds['public_key'] = creds['public_key'], creds['private_key']
        client = OneAll(**creds)
        with self.assertRaises(BadOneAllCredentials):
            client.connections()
if __name__ == '__main__':
main()
| Test suite is taking shape. :) | Test suite is taking shape. :)
| Python | mit | leandigo/pyoneall | ---
+++
@@ -1,7 +1,46 @@
# -*- coding: utf-8 -*-
-from unittest import TestCase
+from __future__ import absolute_import, division, print_function, unicode_literals
+from unittest import TestCase, main
+
+from pyoneall import OneAll
+from pyoneall.classes import BadOneAllCredentials, Connections
class TestOneAll(TestCase):
- def test_whether_test_runs(self):
+ VALID_CREDENTIALS = {
+ 'site_name': 'python',
+ 'public_key': '2d27cffd-1ced-4991-83d1-acce715461e5',
+ # I really hope this doesn't Jynx my accounts.
+ 'private_key': '84d94998-4029-4ac3-be9b-f2825100da6a',
+ }
+
+ INVALID_CREDENTIALS = {
+ 'site_name': 'python',
+ 'public_key': '01234567-89ab-cdef-0123-456789abcdef',
+ 'private_key': '01234567-89ab-cdef-0123-456789abcdef',
+ }
+
+ def test_00_whether_test_runs(self):
self.assertTrue(True)
+
+ def test_01_users_list(self):
+ auth = OneAll(**self.VALID_CREDENTIALS)
+ c = auth.connections()
+ self.assertIsInstance(c, Connections)
+
+ def test_02_bad_credentials(self):
+ auth = OneAll(**self.INVALID_CREDENTIALS)
+ with self.assertRaises(BadOneAllCredentials):
+ auth.connections()
+
+ def dont_test_03_swapped_credentials(self):
+ kwargs = dict(self.VALID_CREDENTIALS)
+ kwargs['private_key'], kwargs['public_key'] = kwargs['public_key'], kwargs['private_key']
+ auth = OneAll(**kwargs)
+ # How should this result be different from test 02?
+ with self.assertRaises(BadOneAllCredentials):
+ auth.connections()
+
+
+if __name__ == '__main__':
+ main() |
6dfbbba5abf380e3f47f9190a864faa13cf1599d | data_preparation.py | data_preparation.py | # importing modules/ libraries
import pandas as pd
import numpy as np
orders_prior_df = pd.read_csv('Data/orders_prior_sample.csv')
order_products_prior_df = pd.read_csv('Data/order_products_prior_sample.csv')
grouped = order_products_prior_df.groupby('order_id', as_index = False)
grouped_data = pd.DataFrame()
grouped_data['order_id'] = grouped['order_id'].aggregate(np.mean)
def product_ids(group):
    """Join a grouped order's product IDs into one space-separated string."""
    return ' '.join(str(pid) for pid in group['product_id'])
grouped_data['product_ids'] = grouped.apply(product_ids)
def add_to_cart_orders(group):
    """Join a grouped order's add-to-cart positions into one space-separated string."""
    return ' '.join(str(pos) for pos in group['add_to_cart_order'])
grouped_data['add_to_cart_orders'] = grouped.apply(add_to_cart_orders)
print('First five rows of grouped_data:\n', grouped_data.head())
orders_prior_merged = pd.merge(orders_prior_df, grouped_data, on='order_id')
print('First five rows of orders_prior_merged:\n', orders_prior_merged.head())
| # importing modules/ libraries
import pandas as pd
import numpy as np
orders_prior_df = pd.read_csv('Data/orders_prior_sample.csv')
order_products_prior_df = pd.read_csv('Data/order_products_prior_sample.csv')
grouped = order_products_prior_df.groupby('order_id', as_index = False)
grouped_data = pd.DataFrame()
grouped_data['order_id'] = grouped['order_id'].aggregate(np.mean)
def product_ids(group):
    """Collapse a grouped order's product IDs to the string 'id1 id2 ...'."""
    return ' '.join(map(str, group['product_id']))
grouped_data['product_ids'] = grouped.apply(product_ids)
def add_to_cart_orders(group):
    """Collapse a grouped order's cart positions to the string 'p1 p2 ...'."""
    return ' '.join(map(str, group['add_to_cart_order']))
grouped_data['add_to_cart_orders'] = grouped.apply(add_to_cart_orders)
grouped_data['reordered'] = grouped['reordered'].aggregate(np.mean)['reordered'].round()
print('First five rows of grouped_data:\n', grouped_data.head())
orders_prior_merged = pd.merge(orders_prior_df, grouped_data, on='order_id')
print('First five rows of orders_prior_merged:\n', orders_prior_merged.head())
| Merge product reordered column with order ids | feat: Merge product reordered column with order ids
| Python | mit | rjegankumar/instacart_prediction_model | ---
+++
@@ -27,6 +27,8 @@
return ' '.join(l)
grouped_data['add_to_cart_orders'] = grouped.apply(add_to_cart_orders)
+
+grouped_data['reordered'] = grouped['reordered'].aggregate(np.mean)['reordered'].round()
print('First five rows of grouped_data:\n', grouped_data.head())
orders_prior_merged = pd.merge(orders_prior_df, grouped_data, on='order_id') |
7873996d49ad32984465086623a3f6537eae11af | nbgrader/preprocessors/headerfooter.py | nbgrader/preprocessors/headerfooter.py | from IPython.nbconvert.preprocessors import Preprocessor
from IPython.nbformat.current import read as read_nb
from IPython.utils.traitlets import Unicode
class IncludeHeaderFooter(Preprocessor):
    """A preprocessor for adding header and/or footer cells to a notebook."""
    # Paths to notebooks whose cells are prepended/appended; the empty
    # string (the trait default) disables the corresponding section.
    header = Unicode("", config=True, help="Path to header notebook")
    footer = Unicode("", config=True, help="Path to footer notebook")
    def preprocess(self, nb, resources):
        """Concatenates the cells from the header and footer notebooks to the
        given cells.
        """
        new_cells = []
        # header
        if self.header != "":
            with open(self.header, 'r') as fh:
                header_nb = read_nb(fh, 'ipynb')
                new_cells.extend(header_nb.worksheets[0].cells)
        # body
        new_cells.extend(nb.worksheets[0].cells)
        # footer
        if self.footer != "":
            with open(self.footer, 'r') as fh:
                footer_nb = read_nb(fh, 'ipynb')
                new_cells.extend(footer_nb.worksheets[0].cells)
        # Mutate the notebook in place, then let the base class run the
        # per-cell hooks over the merged cell list.
        nb.worksheets[0].cells = new_cells
        super(IncludeHeaderFooter, self).preprocess(nb, resources)
        return nb, resources
    def preprocess_cell(self, cell, resources, cell_index):
        # No per-cell transformation; merging happens at notebook level.
        return cell, resources
| from IPython.nbconvert.preprocessors import Preprocessor
from IPython.nbformat.current import read as read_nb
from IPython.utils.traitlets import Unicode
class IncludeHeaderFooter(Preprocessor):
    """Preprocessor that splices header/footer notebook cells around a
    notebook's own cells."""
    header = Unicode("", config=True, help="Path to header notebook")
    footer = Unicode("", config=True, help="Path to footer notebook")
    def preprocess(self, nb, resources):
        """Build the merged cell list (header cells, body cells, footer
        cells) and hand off to the base class."""
        merged = []
        if self.header:
            with open(self.header, 'r') as stream:
                merged.extend(read_nb(stream, 'ipynb').worksheets[0].cells)
        merged.extend(nb.worksheets[0].cells)
        if self.footer:
            with open(self.footer, 'r') as stream:
                merged.extend(read_nb(stream, 'ipynb').worksheets[0].cells)
        nb.worksheets[0].cells = merged
        super(IncludeHeaderFooter, self).preprocess(nb, resources)
        return nb, resources
    def preprocess_cell(self, cell, resources, cell_index):
        # Cells pass through untouched; merging happens at notebook level.
        return cell, resources
| Fix if statements checking if header/footer exist | Fix if statements checking if header/footer exist
| Python | bsd-3-clause | jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,MatKallada/nbgrader,ellisonbg/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jdfreder/nbgrader,jdfreder/nbgrader,jupyter/nbgrader,alope107/nbgrader,MatKallada/nbgrader,modulexcite/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,alope107/nbgrader,dementrock/nbgrader | ---
+++
@@ -17,7 +17,7 @@
new_cells = []
# header
- if self.header != "":
+ if self.header:
with open(self.header, 'r') as fh:
header_nb = read_nb(fh, 'ipynb')
new_cells.extend(header_nb.worksheets[0].cells)
@@ -26,7 +26,7 @@
new_cells.extend(nb.worksheets[0].cells)
# footer
- if self.footer != "":
+ if self.footer:
with open(self.footer, 'r') as fh:
footer_nb = read_nb(fh, 'ipynb')
new_cells.extend(footer_nb.worksheets[0].cells) |
a9cc67b9defeffc76091bd204f230a431db80196 | traftrack/image.py | traftrack/image.py | import PIL.Image
import PIL.ImageMath
import urllib.request
from io import BytesIO
def load_img_url(url):
    # Fetch the raw bytes over HTTP and hand them to PIL via an in-memory
    # buffer (PIL.Image.open needs a seekable file-like object).
    req = urllib.request.urlopen(url)
    data = BytesIO(req.read())
    return PIL.Image.open(data)
def load_img_file(fname):
    # Thin wrapper kept for API symmetry with load_img_url.
    return PIL.Image.open(fname)
def compute_histo_RYG(img, mask):
    """Count red, yellow and green pixels of *img* inside *mask*.

    Returns a (red, yellow, green) tuple of pixel counts.
    """
    img = img.convert(mode='RGB')
    mask = mask.convert(mode='1')

    # Blank out everything outside the mask so it quantizes to black.
    black = PIL.Image.new('RGB', mask.size, color=(0, 0, 0, 0))
    masked = PIL.Image.composite(img, black, mask)

    # Reference palette: index 0 black, 1 red, 2 yellow, 3 green.
    palette = PIL.Image.new('P', (1, 1))
    palette.putpalette(
        [0, 0, 0,  # black
         255, 0, 0,  # red
         255, 255, 0,  # yellow
         0, 255, 0])  # green
    quantized = masked.quantize(palette=palette)

    # getcolors() returns (count, palette_index) pairs only for colours
    # actually present in the image -- indexing colors[1..3] raised
    # IndexError (or returned wrong counts) whenever a colour was absent.
    counts = dict((index, count) for count, index in quantized.getcolors())
    return counts.get(1, 0), counts.get(2, 0), counts.get(3, 0)
| import PIL.Image
import PIL.ImageMath
import urllib.request
from io import BytesIO
def load_img_url(url):
    # Fetch the raw bytes over HTTP and hand them to PIL via an in-memory
    # buffer (PIL.Image.open needs a seekable file-like object).
    req = urllib.request.urlopen(url)
    data = BytesIO(req.read())
    return PIL.Image.open(data)
def load_img_file(fname):
    # Thin wrapper kept for API symmetry with load_img_url.
    return PIL.Image.open(fname)
def compute_histo_RYG(img, mask):
    """Return (red, yellow, green) pixel counts of *img* within *mask*."""
    img = img.convert(mode='RGB')
    mask = mask.convert(mode='1')
    # Everything outside the mask becomes black (palette index 0).
    backdrop = PIL.Image.new('RGB', mask.size, color=(0, 0, 0, 0))
    visible = PIL.Image.composite(img, backdrop, mask)
    # Reference palette: index 0 black, 1 red, 2 yellow, 3 green.
    palette = PIL.Image.new('P', (1, 1))
    palette.putpalette(
        [0, 0, 0,  # black
         255, 0, 0,  # red
         255, 255, 0,  # yellow
         0, 255, 0])  # green
    quantized = visible.quantize(palette=palette)
    # Map palette index -> pixel count; absent colours default to 0.
    histogram = dict((index, count) for count, index in quantized.getcolors())
    return histogram.get(1, 0), histogram.get(2, 0), histogram.get(3, 0)
| Fix issue with non-existing color in compute_histo_RYG | Fix issue with non-existing color in compute_histo_RYG
| Python | mit | asavonic/traftrack | ---
+++
@@ -31,4 +31,8 @@
quantized = masked.quantize(palette=palette)
colors = quantized.getcolors()
- return colors[1][0], colors[2][0], colors[3][0]
+ r = next((c[0] for c in colors if c[1] == 1), 0)
+ y = next((c[0] for c in colors if c[1] == 2), 0)
+ g = next((c[0] for c in colors if c[1] == 3), 0)
+
+ return r, y, g |
61c4b0952e198fd5335f110349b4cc3fe840a02f | bynamodb/patcher.py | bynamodb/patcher.py | from boto.dynamodb2.layer1 import DynamoDBConnection
from .model import Model
def patch_dynamodb_connection(**kwargs):
    """:class:`boto.dynamodb2.layer1.DynamoDBConnection` patcher.
    It partially applies the keyword arguments to the
    :class:`boto.dynamodb2.layer1.DynamoDBConnection` initializer method.
    The common usage of this function would be patching host and port
    to the local DynamoDB or remote DynamoDB as the project configuration
    changes.
    """
    # Idempotence guard: __original_init__ existing means the class was
    # already patched; patching twice would chain wrappers.
    if hasattr(DynamoDBConnection, '__original_init__'):
        return
    DynamoDBConnection.__original_init__ = DynamoDBConnection.__init__
    def init(self, **fkwargs):
        # Patch-time kwargs override per-call kwargs, then delegate to
        # the preserved original initializer.
        fkwargs.update(kwargs)
        self.__original_init__(**fkwargs)
    DynamoDBConnection.__init__ = init
def patch_table_name_prefix(prefix):
    """Patch the table name prefix"""
    # Class attribute shared by all Model subclasses when they build
    # their physical table names.
    Model._table_prefix = prefix
| from boto.dynamodb2.layer1 import DynamoDBConnection
from .model import Model
def patch_from_config(config):
    """Apply the DynamoDB patches described by a configuration mapping.

    Recognized keys: DYNAMODB_CONNECTION (kwargs for the connection
    patcher) and DYNAMODB_PREFIX (table-name prefix); absent keys are
    simply skipped.
    """
    dispatch = (
        ('DYNAMODB_CONNECTION', lambda options: patch_dynamodb_connection(**options)),
        ('DYNAMODB_PREFIX', patch_table_name_prefix),
    )
    for key, apply_patch in dispatch:
        if key in config:
            apply_patch(config[key])
def patch_dynamodb_connection(**kwargs):
    """Partially apply *kwargs* to DynamoDBConnection's initializer.

    Typical use: point every new connection at a local or alternate
    DynamoDB endpoint (host/port) based on project configuration.
    """
    # Already patched -- avoid stacking wrappers on repeated calls.
    if hasattr(DynamoDBConnection, '__original_init__'):
        return
    DynamoDBConnection.__original_init__ = DynamoDBConnection.__init__
    def patched_init(self, **call_kwargs):
        # Patch-time kwargs win over per-call ones, then defer to the
        # preserved original __init__.
        call_kwargs.update(kwargs)
        self.__original_init__(**call_kwargs)
    DynamoDBConnection.__init__ = patched_init
def patch_table_name_prefix(prefix):
    """Patch the table name prefix"""
    # Class attribute shared by all Model subclasses when they build
    # their physical table names.
    Model._table_prefix = prefix
| Add support for the patching connection and the prefix through config dict | Add support for the patching connection and the prefix through config dict
| Python | mit | teddychoi/BynamoDB | ---
+++
@@ -1,6 +1,13 @@
from boto.dynamodb2.layer1 import DynamoDBConnection
from .model import Model
+
+
+def patch_from_config(config):
+ if 'DYNAMODB_CONNECTION' in config:
+ patch_dynamodb_connection(**config['DYNAMODB_CONNECTION'])
+ if 'DYNAMODB_PREFIX' in config:
+ patch_table_name_prefix(config['DYNAMODB_PREFIX'])
def patch_dynamodb_connection(**kwargs): |
ad8a68744c9c844af6e093954b9f50cfc355920a | scripts/update_comments.py | scripts/update_comments.py | """
Update User.comments_viewed_timestamp field & comments model.
Accompanies https://github.com/CenterForOpenScience/osf.io/pull/1762
"""
from modularodm import Q
from framework.auth.core import User
from website.models import Comment
from website.app import init_app
import logging
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
def main():
    # set_backends=True attaches the real database so the migration can
    # write; mfr=False presumably skips renderer setup -- confirm against
    # this codebase's init_app signature.
    init_app(routes=False, set_backends=True, mfr=False)
    update_comments_viewed_timestamp()
    update_comments()
def update_comments_viewed_timestamp():
    # Users whose timestamp field was ever populated (non-null, non-empty).
    users = User.find(Q('comments_viewed_timestamp', 'ne', None) | Q('comments_viewed_timestamp', 'ne', {}))
    for user in users:
        if user.comments_viewed_timestamp:
            # Migrate flat {node: timestamp} entries to the nested
            # {node: {'node': timestamp}} layout; keys are untouched, so
            # mutating values while iterating the dict is safe.
            for node in user.comments_viewed_timestamp:
                user.comments_viewed_timestamp[node] = {'node': user.comments_viewed_timestamp[node]}
            user.save()
def update_comments():
    """Backfill the per-comment fields introduced with comment pages:
    anchor each comment to its node's overview page and mark it visible."""
    for record in Comment.find():
        record.root_target = record.node
        record.page = Comment.OVERVIEW
        record.is_hidden = False
        record.save()
if __name__ == '__main__':
script_utils.add_file_logger(logger, __file__)
main() | """
Update User.comments_viewed_timestamp field & comments model.
Accompanies https://github.com/CenterForOpenScience/osf.io/pull/1762
"""
from modularodm import Q
from framework.auth.core import User
from website.models import Comment
from website.app import init_app
import logging
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
def main():
    # set_backends=True attaches the real database so the migration can
    # write (the init_app mfr flag was removed).
    init_app(routes=False, set_backends=True)
    update_comments_viewed_timestamp()
    update_comments()
def update_comments_viewed_timestamp():
    """Rewrite flat {node: timestamp} maps as {node: {'node': timestamp}}."""
    ever_set = Q('comments_viewed_timestamp', 'ne', None) | Q('comments_viewed_timestamp', 'ne', {})
    for user in User.find(ever_set):
        if not user.comments_viewed_timestamp:
            continue
        # Keys are untouched, so mutating values during iteration is safe.
        for node_id in user.comments_viewed_timestamp:
            user.comments_viewed_timestamp[node_id] = {'node': user.comments_viewed_timestamp[node_id]}
        user.save()
def update_comments():
    # Every existing comment predates threaded pages: anchor it to its
    # node, file it under the overview page, and mark it visible.
    comments = Comment.find()
    for comment in comments:
        comment.root_target = comment.node
        comment.page = Comment.OVERVIEW
        comment.is_hidden = False
        comment.save()
if __name__ == '__main__':
script_utils.add_file_logger(logger, __file__)
main() | Remove mfr parameter from init_app | Remove mfr parameter from init_app
| Python | apache-2.0 | chennan47/osf.io,RomanZWang/osf.io,kch8qx/osf.io,amyshi188/osf.io,billyhunt/osf.io,zamattiac/osf.io,abought/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,zamattiac/osf.io,zachjanicki/osf.io,acshi/osf.io,brianjgeiger/osf.io,DanielSBrown/osf.io,TomHeatwole/osf.io,KAsante95/osf.io,chrisseto/osf.io,billyhunt/osf.io,crcresearch/osf.io,erinspace/osf.io,alexschiller/osf.io,caneruguz/osf.io,chrisseto/osf.io,sloria/osf.io,laurenrevere/osf.io,felliott/osf.io,acshi/osf.io,binoculars/osf.io,caneruguz/osf.io,zamattiac/osf.io,abought/osf.io,abought/osf.io,asanfilippo7/osf.io,wearpants/osf.io,mfraezz/osf.io,TomBaxter/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,doublebits/osf.io,cslzchen/osf.io,kwierman/osf.io,TomBaxter/osf.io,mluo613/osf.io,SSJohns/osf.io,rdhyee/osf.io,samchrisinger/osf.io,leb2dg/osf.io,sloria/osf.io,baylee-d/osf.io,SSJohns/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,cslzchen/osf.io,erinspace/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,mattclark/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,alexschiller/osf.io,cslzchen/osf.io,mluke93/osf.io,hmoco/osf.io,saradbowman/osf.io,leb2dg/osf.io,SSJohns/osf.io,chrisseto/osf.io,acshi/osf.io,brianjgeiger/osf.io,doublebits/osf.io,binoculars/osf.io,brandonPurvis/osf.io,alexschiller/osf.io,mluo613/osf.io,jnayak1/osf.io,adlius/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,acshi/osf.io,baylee-d/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,kwierman/osf.io,KAsante95/osf.io,felliott/osf.io,DanielSBrown/osf.io,doublebits/osf.io,caseyrollins/osf.io,hmoco/osf.io,baylee-d/osf.io,jnayak1/osf.io,mluke93/osf.io,emetsger/osf.io,laurenrevere/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,SSJohns/osf.io,aaxelb/osf.io,asanfilippo7/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,caseyrollins/osf.io,mluke93/osf.io,mluo613/osf.io,felliott/osf.io,rdhyee/osf.io,zachjanicki/osf.io,caseyrollins/osf.io,emetsger/osf.io,billyhunt/osf.io,Joh
netordoff/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,DanielSBrown/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,chennan47/osf.io,alexschiller/osf.io,amyshi188/osf.io,TomBaxter/osf.io,billyhunt/osf.io,wearpants/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,abought/osf.io,mfraezz/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,KAsante95/osf.io,kch8qx/osf.io,caneruguz/osf.io,kch8qx/osf.io,RomanZWang/osf.io,icereval/osf.io,chrisseto/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,zamattiac/osf.io,wearpants/osf.io,kwierman/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,adlius/osf.io,emetsger/osf.io,crcresearch/osf.io,GageGaskins/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,rdhyee/osf.io,icereval/osf.io,cslzchen/osf.io,emetsger/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,hmoco/osf.io,pattisdr/osf.io,zachjanicki/osf.io,mluo613/osf.io,mattclark/osf.io,adlius/osf.io,wearpants/osf.io,mattclark/osf.io,kwierman/osf.io,kch8qx/osf.io,RomanZWang/osf.io,mluo613/osf.io,hmoco/osf.io,felliott/osf.io,binoculars/osf.io,doublebits/osf.io,acshi/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,asanfilippo7/osf.io,mluke93/osf.io,samchrisinger/osf.io,chennan47/osf.io,aaxelb/osf.io,adlius/osf.io,saradbowman/osf.io,KAsante95/osf.io,doublebits/osf.io,erinspace/osf.io,laurenrevere/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,icereval/osf.io,leb2dg/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,samchrisinger/osf.io,kch8qx/osf.io,cwisecarver/osf.io,sloria/osf.io,mfraezz/osf.io,TomHeatwole/osf.io | ---
+++
@@ -13,7 +13,7 @@
def main():
- init_app(routes=False, set_backends=True, mfr=False)
+ init_app(routes=False, set_backends=True)
update_comments_viewed_timestamp()
update_comments()
|
b4712e75655108d396e5a4ee9b274b34c338e5b9 | api/models.py | api/models.py | from django.db import models
from django.utils.timezone import now
class Reading(models.Model):
    """One batch of particulate-matter sensor readings."""
    # Owning user for authentication; reverse accessor is user.api.
    owner = models.ForeignKey('auth.User', related_name='api',
                              default='')
    # Row creation time, set automatically on insert.
    created = models.DateTimeField(auto_now_add=True)
    # PM10/PM2.5 values and their raw reading counterparts.
    # TODO how do you programmatically set these fields?
    pm10 = models.IntegerField()
    pm10_reading = models.IntegerField()
    pm25 = models.IntegerField()
    pm25_reading = models.IntegerField()
    class Meta:
        # Newest readings first by default.
        ordering = ('-created',)
| from django.db import models
from django.utils.timezone import now
class Reading(models.Model):
    """One batch of particulate-matter sensor readings."""
    # Owning user for authentication; reverse accessor is user.readings.
    owner = models.ForeignKey('auth.User', related_name='readings',
                              default='')
    # Row creation time, set automatically on insert.
    created = models.DateTimeField(auto_now_add=True)
    # Hour bucket intended for group-by aggregation. NOTE: the original
    # default referenced datetime.datetime.now without importing the
    # datetime module (NameError at import time); use the file's imported
    # timezone-aware `now` instead.
    createdHour = models.DateTimeField(default=now, blank=True)
    # Accumulated sensor values and the number of samples aggregated.
    # TODO how do you programmatically set these fields?
    pm10 = models.IntegerField(default=0)
    pm25 = models.IntegerField(default=0)
    pm10count = models.IntegerField(default=0)
    pm25count = models.IntegerField(default=0)
    class Meta:
        # Newest readings first by default.
        ordering = ('-created',)
    def save(self, *args, **kwargs):
        # Plain passthrough today; kept as the hook where createdHour
        # bucketing can be computed before insert.
        super(Reading, self).save(*args, **kwargs)
| Python | bsd-3-clause | codefornigeria/dustduino-server,developmentseed/dustduino-server,codefornigeria/dustduino-server,codefornigeria/dustduino-server,developmentseed/dustduino-server,developmentseed/dustduino-server | ---
+++
@@ -3,18 +3,22 @@
class Reading(models.Model):
# Authenticating on user
- owner = models.ForeignKey('auth.User', related_name='api',
+ owner = models.ForeignKey('auth.User', related_name='readings',
default='')
# When the row gets made
created = models.DateTimeField(auto_now_add=True)
+ createdHour = models.DateTimeField(default = datetime.datetime.now, blank=True)
# Data on sensor readings
# TODO how do you programmatically set these fields?
- pm10 = models.IntegerField()
- pm10_reading = models.IntegerField()
- pm25 = models.IntegerField()
- pm25_reading = models.IntegerField()
+ pm10 = models.IntegerField(default = 0)
+ pm25 = models.IntegerField(default = 0)
+ pm10count = models.IntegerField(default = 0)
+ pm25count = models.IntegerField(default = 0)
class Meta:
ordering = ('-created',)
+
+ def save(self, *args, **kwargs):
+ super(Reading, self).save(*args, **kwargs) |
811a94b477abae045fb8b840c33481ed8b1d8266 | app/upload.py | app/upload.py | #!/usr/bin/env python
import tornado.web
import os
import uuid
class UploadHandler(tornado.web.RequestHandler):
    def post(self):
        # Persist the first uploaded file from the 'filearg' form field
        # under a random UUID-based name in ./uploads.
        fileinfo = self.request.files['filearg'][0]
        # NOTE(review): leftover Python 2 debug prints.
        print 'hi'
        print "fileinfo is", fileinfo.keys()
        fname = fileinfo['filename']
        # Keep the client's extension so the stored file stays recognizable.
        extn = os.path.splitext(fname)[-1]
        cname = str(uuid.uuid4()) + extn
        # NOTE(review): handle is never closed and the body (raw bytes) is
        # written in text mode -- consider `with open(..., 'wb')`.
        fh = open('uploads/' + cname, 'w')
        fh.write(fileinfo['body'])
        self.finish(cname + " is uploaded!! Check uploads folder")
def get(self):
self.render("upload.html") | #!/usr/bin/env python
import tornado.web
import os
import uuid
class UploadHandler(tornado.web.RequestHandler):
def post(self):
fileinfo = self.request.files['filearg'][0]
print 'hi'
print "fileinfo is", fileinfo.keys()
fname = fileinfo['filename']
extn = os.path.splitext(fname)[-1]
cname = str(uuid.uuid4()) + extn
if not os.exists(os.path.join(os.getcwd(), '/uploads')):
os.mkdirs(os.path.join(os.getcwd(), '/uploads')):
fh = open('uploads/' + cname, 'w')
fh.write(fileinfo['body'])
self.finish(cname + " is uploaded!! Check uploads folder")
def get(self):
self.render("upload.html") | Create folder if it doesn't exist | Create folder if it doesn't exist
| Python | mit | santosfamilyfoundation/SantosCloud,santosfamilyfoundation/TrafficCloud,santosfamilyfoundation/TrafficCloud,santosfamilyfoundation/SantosCloud,santosfamilyfoundation/TrafficCloud,santosfamilyfoundation/SantosCloud,santosfamilyfoundation/SantosCloud | ---
+++
@@ -12,6 +12,8 @@
fname = fileinfo['filename']
extn = os.path.splitext(fname)[-1]
cname = str(uuid.uuid4()) + extn
+ if not os.exists(os.path.join(os.getcwd(), '/uploads')):
+ os.mkdirs(os.path.join(os.getcwd(), '/uploads')):
fh = open('uploads/' + cname, 'w')
fh.write(fileinfo['body'])
self.finish(cname + " is uploaded!! Check uploads folder") |
7698d7256f7a88b02b3dd02b411532cb4a6a46aa | cmi/modify_uri.py | cmi/modify_uri.py | #! /usr/bin/env python
#
# Replaces the extension ".la" with ".so" in the library URI in all
# ".cca" files in %{buildroot}%{_datadir}/cca.
#
# Mark Piper (mark.piper@colorado.edu)
import os
import sys
import glob
from subprocess import check_call
try:
install_share_dir = sys.argv[1]
cca_dir = os.path.join(install_share_dir, "cca")
print("Modifying *.cca files in " + cca_dir)
for f in glob.glob(os.path.join(cca_dir, "*.cca")):
check_call(["sed", "--in-place", "s/\.la/.so/", f])
except:
print("Error in post-install modification of *.cca files.")
sys.exit(1)
| #! /usr/bin/env python
#
# Replaces the extension ".la" with ".so" in the library URI in all
# ".cca" files in %{buildroot}%{_datadir}/cca.
#
# Mark Piper (mark.piper@colorado.edu)
import os
import sys
import glob
from subprocess import check_call
try:
install_share_dir = sys.argv[1]
cca_dir = os.path.join(install_share_dir, "cca")
print("Modifying *.cca files in " + cca_dir)
for f in glob.glob(os.path.join(cca_dir, "*.cca")):
check_call(["sed", "--in-place", "s/\.la/.so/", f])
except:
print("Error in modification of *.cca files.")
sys.exit(1)
| Change wording in error message | Change wording in error message
| Python | mit | csdms/rpm_tools,csdms/rpm_tools | ---
+++
@@ -17,5 +17,5 @@
for f in glob.glob(os.path.join(cca_dir, "*.cca")):
check_call(["sed", "--in-place", "s/\.la/.so/", f])
except:
- print("Error in post-install modification of *.cca files.")
+ print("Error in modification of *.cca files.")
sys.exit(1) |
fd09975379338d47ec1bea8709c4a3c803aaa40d | slackbot.py | slackbot.py | #! /usr/bin/env python2.7
import requests
class Slackbot(object):
    """Minimal client for a Slack team's slackbot webhook."""

    def __init__(self, slack_name, token):
        """slack_name: the team subdomain; token: slackbot integration token."""
        self.slack_name = slack_name
        self.token = token
        assert self.token, "Token should not be blank"
        self.url = self.sb_url()

    def sb_url(self):
        """Return the team-specific slackbot endpoint URL."""
        url = "https://{}.slack.com/".format(self.slack_name)
        url += "services/hooks/slackbot"
        return url

    def say(self, channel, statement):
        """
        channel should not be preceded with '#'
        """
        assert channel # not blank
        if channel[0] == '#':
            channel = channel[1:]
        nurl = self.url + "?token={}&channel=%23{}".format(self.token, channel)
        # Encode explicitly: posting a unicode statement containing
        # non-ASCII characters otherwise fails or is garbled depending on
        # how requests guesses the body encoding.
        p = requests.post(nurl, data=statement.encode('utf-8'))
        return p.status_code
| #! /usr/bin/env python2.7
import requests
class Slackbot(object):
    """Tiny wrapper around a Slack team's slackbot webhook endpoint."""

    def __init__(self, slack_name, token):
        self.slack_name = slack_name
        self.token = token
        assert self.token, "Token should not be blank"
        self.url = self.sb_url()

    def sb_url(self):
        # The endpoint is derived from the team subdomain.
        return "https://{}.slack.com/".format(self.slack_name) + "services/hooks/slackbot"

    def say(self, channel, statement):
        """Post *statement* to *channel* (a leading '#' is tolerated and
        stripped); returns the HTTP status code."""
        assert channel # not blank
        if channel.startswith('#'):
            channel = channel[1:]
        target = self.url + "?token={}&channel=%23{}".format(self.token, channel)
        response = requests.post(target, data=statement.encode('utf-8'))
        return response.status_code
| Fix unicode encoding of Slack message posts | Fix unicode encoding of Slack message posts
| Python | apache-2.0 | rossrader/destalinator | ---
+++
@@ -24,5 +24,5 @@
if channel[0] == '#':
channel = channel[1:]
nurl = self.url + "?token={}&channel=%23{}".format(self.token, channel)
- p = requests.post(nurl, statement)
+ p = requests.post(nurl, data=statement.encode('utf-8'))
return p.status_code |
860629358dd7651b1f35a70f65dfabb1010daa77 | tests/QueryableListTests/tutils.py | tests/QueryableListTests/tutils.py |
def filterDictToStr(filterDict):
return ', '.join(['%s=%s' %(key, repr(value)) for key, value in filterDict.items()])
class DataObject(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __str__(self):
return 'DataObject( %s )' %(', '.join(['%s=%s' %(key, repr(value)) for key, value in self.__dict__.items()]))
__repr__ = __str__
|
def filterDictToStr(filterDict):
return ', '.join(['%s=%s' %(key, repr(value)) for key, value in filterDict.items()])
class DataObject(object):
def __init__(self, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
def __str__(self):
return 'DataObject( %s )' %(', '.join(['%s=%s' %(key, repr(value)) for key, value in self.__dict__.items()]))
__repr__ = __str__
class hashableDict(dict):
'''
A dict that is hashable.
'''
def __hash__(self):
KEYVAL_SEP = '~~..~~'
NONEVAL='~~__NONE$$zz'
PAIRS_SEP='88ascvjikZZ'
hashableStr = []
keys = list(self.keys())
keys.sort()
for key in keys:
value = self[key]
if value is None:
value = NONEVAL
else:
value = str(value)
hashableStr.append(key + KEYVAL_SEP + value)
hashableStr = PAIRS_SEP.join(hashableStr)
ret = hash(hashableStr)
return ret
| Add a hashable-dict type for testing | Add a hashable-dict type for testing
| Python | lgpl-2.1 | kata198/QueryableList,kata198/QueryableList | ---
+++
@@ -12,3 +12,36 @@
return 'DataObject( %s )' %(', '.join(['%s=%s' %(key, repr(value)) for key, value in self.__dict__.items()]))
__repr__ = __str__
+
+
+class hashableDict(dict):
+ '''
+ A dict that is hashable.
+ '''
+
+ def __hash__(self):
+ KEYVAL_SEP = '~~..~~'
+ NONEVAL='~~__NONE$$zz'
+
+ PAIRS_SEP='88ascvjikZZ'
+
+ hashableStr = []
+
+ keys = list(self.keys())
+ keys.sort()
+
+ for key in keys:
+ value = self[key]
+
+ if value is None:
+ value = NONEVAL
+ else:
+ value = str(value)
+
+ hashableStr.append(key + KEYVAL_SEP + value)
+
+ hashableStr = PAIRS_SEP.join(hashableStr)
+
+ ret = hash(hashableStr)
+ return ret
+ |
1eedac5229e5a9128c4fbc09f7d7b97a3859e9b9 | django_sse/views.py | django_sse/views.py | # -*- coding: utf-8 -*-
from django.views.generic import View
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
try:
from django.http import StreamingHttpResponse as HttpResponse
except ImportError:
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from sse import Sse
class BaseSseView(View):
"""
This is a base class for sse streaming.
"""
def get_last_id(self):
if "HTTP_LAST_EVENT_ID" in self.request.META:
return self.request.META['HTTP_LAST_EVENT_ID']
return None
def _iterator(self):
for subiterator in self.iterator():
for bufferitem in self.sse:
yield bufferitem
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
self.sse = Sse()
self.request = request
self.args = args
self.kwargs = kwargs
response = HttpResponse(self._iterator(), content_type="text/event-stream")
response['Cache-Control'] = 'no-cache'
response['Software'] = 'django-sse'
return response
def iterator(self):
"""
This is a source of stream.
Must be use sentence ``yield`` for flush
content fon sse object to the client.
Example:
def iterator(self):
counter = 0
while True:
self.sse.add_message('foo', 'bar')
self.sse.add_message('bar', 'foo')
yield
"""
raise NotImplementedError
| # -*- coding: utf-8 -*-
from django.views.generic import View
from django.views.decorators.csrf import csrf_exempt
try:
from django.http import StreamingHttpResponse as HttpResponse
except ImportError:
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from sse import Sse
class BaseSseView(View):
"""
This is a base class for sse streaming.
"""
def get_last_id(self):
if "HTTP_LAST_EVENT_ID" in self.request.META:
return self.request.META['HTTP_LAST_EVENT_ID']
return None
def _iterator(self):
for subiterator in self.iterator():
for bufferitem in self.sse:
yield bufferitem
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
self.sse = Sse()
self.request = request
self.args = args
self.kwargs = kwargs
response = HttpResponse(self._iterator(), content_type="text/event-stream")
response['Cache-Control'] = 'no-cache'
response['Software'] = 'django-sse'
return response
def iterator(self):
"""
This is a source of stream.
Must be use sentence ``yield`` for flush
content fon sse object to the client.
Example:
def iterator(self):
counter = 0
while True:
self.sse.add_message('foo', 'bar')
self.sse.add_message('bar', 'foo')
yield
"""
raise NotImplementedError
| Remove duplicate import. (Thanks to MechanisM) | Remove duplicate import. (Thanks to MechanisM)
| Python | bsd-3-clause | chadmiller/django-sse,niwinz/django-sse,chadmiller/django-sse | ---
+++
@@ -2,7 +2,6 @@
from django.views.generic import View
from django.views.decorators.csrf import csrf_exempt
-from django.http import HttpResponse
try:
from django.http import StreamingHttpResponse as HttpResponse |
da9a8811669daba4b89ceb0bec30787ff5efe8d0 | rum/rum.py | rum/rum.py | import threading
from flask import Flask
from users import users
api = Flask(__name__)
lock = threading.Lock()
user_num = 0
@api.route('/')
def index():
return 'Rackspace User Management'
@api.route('/user')
def get_user():
global user_num
with lock:
if user_num < len(users):
bash = "export OS_REGION_NAME=IAD\n"
bash += "export OS_USERNAME={}\n".format(users[user_num].username)
bash += "export OS_API_KEY={}\n".format(users[user_num].api_key)
user_num += 1
else:
bash = "No More Creds\n"
return bash
@api.route('/reset')
def reset_users():
global user_num
with lock:
user_num = 0
return "More Creds\n"
if __name__ == '__main__':
api.run(debug=True)
| import threading
from flask import Flask
from users import users
api = Flask(__name__)
lock = threading.Lock()
user_num = 0
@api.route('/')
def index():
return 'Rackspace User Management'
@api.route('/user')
def get_user():
global user_num
with lock:
if user_num < len(users):
bash = "export OS_REGION_NAME=ORD\n"
bash += "export OS_USERNAME={}\n".format(users[user_num].username)
bash += "export OS_API_KEY={}\n".format(users[user_num].api_key)
bash += "export MACHINE_NAME=machine{0:02d}\n".format(user_num)
user_num += 1
else:
bash = "No More Creds\n"
return bash
@api.route('/reset')
def reset_users():
global user_num
with lock:
user_num = 0
return "More Creds\n"
if __name__ == '__main__':
api.run(debug=True)
| Switch to ORD. Add MACHINE_NAME. | Switch to ORD. Add MACHINE_NAME.
| Python | mit | everett-toews/rackspace-user-management | ---
+++
@@ -19,9 +19,10 @@
with lock:
if user_num < len(users):
- bash = "export OS_REGION_NAME=IAD\n"
+ bash = "export OS_REGION_NAME=ORD\n"
bash += "export OS_USERNAME={}\n".format(users[user_num].username)
bash += "export OS_API_KEY={}\n".format(users[user_num].api_key)
+ bash += "export MACHINE_NAME=machine{0:02d}\n".format(user_num)
user_num += 1
else:
bash = "No More Creds\n" |
63c72bab549ae2c5aaa6370aebe10cce1e14effe | sorted_nearest/__init__.py | sorted_nearest/__init__.py | from sorted_nearest.src.sorted_nearest import (nearest_previous_nonoverlapping,
nearest_next_nonoverlapping,
nearest_nonoverlapping,
find_clusters)
| from sorted_nearest.src.sorted_nearest import (nearest_previous_nonoverlapping,
nearest_next_nonoverlapping,
nearest_nonoverlapping,
find_clusters)
from sorted_nearest.version import __version__
| Add version flag to sorted nearest | Add version flag to sorted nearest
| Python | bsd-3-clause | pyranges/sorted_nearest,pyranges/sorted_nearest,pyranges/sorted_nearest | ---
+++
@@ -2,3 +2,6 @@
nearest_next_nonoverlapping,
nearest_nonoverlapping,
find_clusters)
+
+
+from sorted_nearest.version import __version__ |
8c5386209fb859a30ea160fd5a1ac1303b9574ea | batch_related.py | batch_related.py | #!/usr/bin/env python
import datetime
import logging
import api
def get_now_str():
format = '%d.%h-%H:%M:%S'
now = datetime.datetime.now()
now_str = datetime.datetime.strftime(now, format)
return now_str
if __name__ == '__main__':
collections = api.SEARCHABLE_COLLECTIONS
api.logger.setLevel(logging.INFO)
db = api.data_db
for collection in collections:
#if collection != 'movies':
# continue
started = datetime.datetime.now()
count = db[collection].count()
print 'Starting to work on {} at {}'.format(collection, get_now_str())
print 'Collection {} has {} documents.'.format(collection, count)
for doc in db[collection].find({}, snapshot=True):
key = '{}.{}'.format(collection, doc['_id'])
related = api.get_bhp_related(doc)
if not related:
print 'No related items found for {}'.format(key)
doc['related'] = []
db[collection].save(doc)
continue
else:
doc['related'] = related
db[collection].save(doc)
finished = datetime.datetime.now()
per_doc_time = (finished - started).total_seconds()/count
print '''Finished working on {} at {}.
Related took {:.2f} seconds per document.'''.format(
collection, get_now_str(), per_doc_time)
| #!/usr/bin/env python
import datetime
import logging
import api
def get_now_str():
format = '%d.%h-%H:%M:%S'
now = datetime.datetime.now()
now_str = datetime.datetime.strftime(now, format)
return now_str
if __name__ == '__main__':
collections = api.SEARCHABLE_COLLECTIONS
api.logger.setLevel(logging.INFO)
db = api.data_db
for collection in collections:
started = datetime.datetime.now()
count = db[collection].count()
print 'Starting to work on {} at {}'.format(collection, get_now_str())
print 'Collection {} has {} documents.'.format(collection, count)
for doc in db[collection].find({}, modifiers={"$snapshot": "true"}):
key = '{}.{}'.format(collection, doc['_id'])
related = api.get_bhp_related(doc)
if not related:
print 'No related items found for {}'.format(key)
doc['related'] = []
db[collection].save(doc)
continue
else:
doc['related'] = related
db[collection].save(doc)
finished = datetime.datetime.now()
per_doc_time = (finished - started).total_seconds()/count
print '''Finished working on {} at {}.
Related took {:.2f} seconds per document.'''.format(
collection, get_now_str(), per_doc_time)
| Use the updated pymongo syntax for mongo snapshot cursor | Use the updated pymongo syntax for mongo snapshot cursor
| Python | agpl-3.0 | Beit-Hatfutsot/dbs-back,Beit-Hatfutsot/dbs-back,Beit-Hatfutsot/dbs-back,Beit-Hatfutsot/dbs-back | ---
+++
@@ -18,13 +18,11 @@
api.logger.setLevel(logging.INFO)
db = api.data_db
for collection in collections:
- #if collection != 'movies':
- # continue
started = datetime.datetime.now()
count = db[collection].count()
print 'Starting to work on {} at {}'.format(collection, get_now_str())
print 'Collection {} has {} documents.'.format(collection, count)
- for doc in db[collection].find({}, snapshot=True):
+ for doc in db[collection].find({}, modifiers={"$snapshot": "true"}):
key = '{}.{}'.format(collection, doc['_id'])
related = api.get_bhp_related(doc)
if not related: |
30b4003b22ab12bcc83013c63903dad7e36a5374 | webserver/codemanagement/urls.py | webserver/codemanagement/urls.py | from django.conf.urls.defaults import patterns, url, include
from piston.resource import Resource
from .views import (CreateRepoView, UpdatePasswordView,
ListSubmissionView, SubmitView)
from .api_handlers import RepoAuthHandler, RepoPathHandler, RepoTagListHandler
urlpatterns = patterns(
"",
url(r'^api/repo/auth/', Resource(handler=RepoAuthHandler)),
url(r'^api/repo/path/', Resource(handler=RepoPathHandler)),
url(r'^api/repo/tags/', Resource(handler=RepoTagListHandler)),
url(r'^competition/(?P<comp_slug>[\w-]+)/create-repo/$',
CreateRepoView.as_view(),
name='create_repo'),
url(r'^competition/(?P<comp_slug>[\w-]+)/update-password/$',
UpdatePasswordView.as_view(),
name='update_repo_password'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submissions/$',
ListSubmissionView.as_view(),
name='list_submissions'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
name='list_submissions'),
url(r'^repo/', include('greta.repo_view_urls')),
)
| from django.conf.urls.defaults import patterns, url, include
from piston.resource import Resource
from .views import (CreateRepoView, UpdatePasswordView,
ListSubmissionView, SubmitView)
from .api_handlers import RepoAuthHandler, RepoPathHandler, RepoTagListHandler
urlpatterns = patterns(
"",
url(r'^api/repo/auth/', Resource(handler=RepoAuthHandler)),
url(r'^api/repo/path/', Resource(handler=RepoPathHandler)),
url(r'^api/repo/tags/', Resource(handler=RepoTagListHandler)),
url(r'^competition/(?P<comp_slug>[\w-]+)/create-repo/$',
CreateRepoView.as_view(),
name='create_repo'),
url(r'^competition/(?P<comp_slug>[\w-]+)/update-password/$',
UpdatePasswordView.as_view(),
name='update_repo_password'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submissions/$',
ListSubmissionView.as_view(),
name='list_submissions'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
name='submit'),
url(r'^repo/', include('greta.repo_view_urls')),
)
| Fix URL name for submission page | Fix URL name for submission page
| Python | bsd-3-clause | siggame/webserver,siggame/webserver,siggame/webserver | ---
+++
@@ -27,7 +27,7 @@
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
- name='list_submissions'),
+ name='submit'),
url(r'^repo/', include('greta.repo_view_urls')),
) |
393b2b3cf7a62219a3567374108822eb941ffb69 | zou/app/services/base_service.py | zou/app/services/base_service.py | from sqlalchemy.exc import StatementError
from zou.app.utils import events
def get_instance(model, instance_id, exception):
"""
Get instance of any model from its ID and raise given exception if not
found.
"""
if instance_id is None:
raise exception()
try:
instance = model.get(instance_id)
except StatementError:
raise exception()
if instance is None:
raise exception()
return instance
def get_or_create_instance_by_name(model, **kwargs):
"""
Get instance of any model by name. If it doesn't exist it creates a new
instance of this model from positional arguments dict.
"""
instance = model.get_by(name=kwargs["name"])
if instance is None:
instance = model.create(**kwargs)
events.emit("%s:new" % model.__tablename__, {
"%s_id" % model.__tablename__: instance.id
})
return instance.serialize()
def get_model_map_from_array(models):
"""
Return a map matching based on given model list. The maps keys are the model
IDs and the values are the models. It's convenient to check find a model by
its ID.
"""
return {model["id"]: model for model in models}
| from sqlalchemy.exc import StatementError
from zou.app.utils import events
def get_instance(model, instance_id, exception):
"""
Get instance of any model from its ID and raise given exception if not
found.
"""
if instance_id is None:
raise exception()
try:
instance = model.get(instance_id)
except StatementError:
raise exception()
if instance is None:
raise exception()
return instance
def get_or_create_instance_by_name(model, **kwargs):
"""
Get instance of any model by name. If it doesn't exist it creates a new
instance of this model from positional arguments dict.
"""
instance = model.get_by(name=kwargs["name"])
if instance is None:
instance = model.create(**kwargs)
events.emit("%s:new" % model.__tablename__, {
"%s_id" % model.__tablename__: instance.id
})
return instance.serialize()
def get_model_map_from_array(models):
"""
Return a map matching based on given model list. The maps keys are the model
IDs and the values are the models. It's convenient to check find a model by
its ID.
"""
return {model["id"]: model for model in models}
| Fix wrong ascii character in comments of base service | Fix wrong ascii character in comments of base service
| Python | agpl-3.0 | cgwire/zou | ---
+++
@@ -38,7 +38,7 @@
def get_model_map_from_array(models):
"""
Return a map matching based on given model list. The maps keys are the model
- IDs and the values are the models. It's convenient to check find a model by
+ IDs and the values are the models. It's convenient to check find a model by
its ID.
"""
return {model["id"]: model for model in models} |
4ddfb4bfb9e1f6a94c5914296aa878d495929636 | nightreads/settings/heroku.py | nightreads/settings/heroku.py | import dj_database_url
from .common import *
SECRET_KEY = '4ln7qg*67amc&7-h^=^0%ml_s(w4y_fy4uybib%j(v(46-x0i2'
DEBUG = False
ALLOWED_HOSTS = ['*']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
| import dj_database_url
import os
from .common import *
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = False
ALLOWED_HOSTS = ['*']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
| Use `SECRET_KEY` from env in Heroku | Use `SECRET_KEY` from env in Heroku
| Python | mit | avinassh/nightreads,avinassh/nightreads | ---
+++
@@ -1,8 +1,9 @@
import dj_database_url
+import os
from .common import *
-SECRET_KEY = '4ln7qg*67amc&7-h^=^0%ml_s(w4y_fy4uybib%j(v(46-x0i2'
+SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = False
ALLOWED_HOSTS = ['*']
|
44b709f57dfaa12f158caf32c2032f1455443298 | serializer.py | serializer.py | # -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
def serialize(obj):
return encodebytes(dumps(obj)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
| # -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
PICKLE_PROTOCOL = 2;
def serialize(obj):
return encodebytes(dumps(obj, PICKLE_PROTOCOL)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
| Use pickle protocole 2 to BC for Python2* | [System][Serializer] Use pickle protocole 2 to BC for Python2*
| Python | mit | pymfony/system | ---
+++
@@ -22,9 +22,10 @@
"""
CHARSET = 'UTF-8';
+PICKLE_PROTOCOL = 2;
def serialize(obj):
- return encodebytes(dumps(obj)).decode(CHARSET).replace('\n', '');
+ return encodebytes(dumps(obj, PICKLE_PROTOCOL)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET))); |
fa66f44cf9783e790a2758b255ad740e712dc667 | heufybot/output.py | heufybot/output.py | class OutputHandler(object):
def __init__(self, connection):
self.connection = connection
def cmdNICK(self, nick):
self.connection.sendMessage("NICK", nick)
def cmdUSER(self, ident, gecos):
# RFC2812 allows usermodes to be set, but this isn't implemented much in IRCds at all.
# Pass 0 for usermodes instead.
self.connection.sendMessage("USER", ident, "0", "*", ":{}".format(gecos))
def cmdQUIT(self, reason):
self.connection.sendMessage("QUIT", ":{}".format(reason))
| class OutputHandler(object):
def __init__(self, connection):
self.connection = connection
def cmdNICK(self, nick):
self.connection.sendMessage("NICK", nick)
def cmdQUIT(self, reason):
self.connection.sendMessage("QUIT", ":{}".format(reason))
def cmdUSER(self, ident, gecos):
# RFC2812 allows usermodes to be set, but this isn't implemented much in IRCds at all.
# Pass 0 for usermodes instead.
self.connection.sendMessage("USER", ident, "0", "*", ":{}".format(gecos))
| Put commands in alphabetical order for my own sanity | Put commands in alphabetical order for my own sanity
| Python | mit | Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot | ---
+++
@@ -5,10 +5,10 @@
def cmdNICK(self, nick):
self.connection.sendMessage("NICK", nick)
+ def cmdQUIT(self, reason):
+ self.connection.sendMessage("QUIT", ":{}".format(reason))
+
def cmdUSER(self, ident, gecos):
# RFC2812 allows usermodes to be set, but this isn't implemented much in IRCds at all.
# Pass 0 for usermodes instead.
self.connection.sendMessage("USER", ident, "0", "*", ":{}".format(gecos))
-
- def cmdQUIT(self, reason):
- self.connection.sendMessage("QUIT", ":{}".format(reason)) |
7106317db23165220754f1cf45e7a8d30a9a76db | dyfunconn/fc/cos.py | dyfunconn/fc/cos.py | #
"""
"""
from ..analytic_signal import analytic_signal
import numpy as np
def cos(data, fb=None, fs=None, pairs=None):
"""
"""
n_samples, n_rois = np.shape(data)
X = None
if fb is not None and fs is not None:
_, uphases, _ = analytic_signal(data, fb, fs)
X = uphases
else:
X = data
conn_mtx = np.zeros((n_rois, n_rois), dtype=np.float32)
for k in range(n_rois):
for l in range(k + 1, n_rois):
val = np.sum(np.cos(X[k, :] - X[l, : ])) / np.float32(n_samples)
val = np.abs(val)
conn_mtx[k, l] = val
return conn_mtx
| #
"""
"""
from ..analytic_signal import analytic_signal
import numpy as np
def cos(data, fb=None, fs=None, pairs=None):
"""
"""
n_rois, n_samples = np.shape(data)
X = None
if fb is not None and fs is not None:
_, uphases, _ = analytic_signal(data, fb, fs)
X = uphases
else:
X = data
conn_mtx = np.zeros((n_rois, n_rois), dtype=np.float32)
for k in range(n_rois):
for l in range(k + 1, n_rois):
val = np.sum(np.cos(X[k, :] - X[l, :])) / np.float32(n_samples)
val = np.abs(val)
conn_mtx[k, l] = val
return conn_mtx
| Change the order of shape. | Change the order of shape.
| Python | bsd-3-clause | makism/dyfunconn | ---
+++
@@ -13,7 +13,7 @@
"""
"""
- n_samples, n_rois = np.shape(data)
+ n_rois, n_samples = np.shape(data)
X = None
if fb is not None and fs is not None:
@@ -25,7 +25,7 @@
conn_mtx = np.zeros((n_rois, n_rois), dtype=np.float32)
for k in range(n_rois):
for l in range(k + 1, n_rois):
- val = np.sum(np.cos(X[k, :] - X[l, : ])) / np.float32(n_samples)
+ val = np.sum(np.cos(X[k, :] - X[l, :])) / np.float32(n_samples)
val = np.abs(val)
conn_mtx[k, l] = val |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.