commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
1eb9830485ec82713d3e8d4d1e13ea1fdc1733c6 | airtable.py | airtable.py | import requests, json
import pandas as pd
class AT:
def __init__(self, base, api):
self.base = base
self.api = api
self.headers = {"Authorization": "Bearer "+self.api}
def getTable(self,table):
r = requests.get("https://api.airtable.com/v0/"+self.base+"/"+table, headers=self.headers)
j = r.json()
df = pd.DataFrame(dict(zip([i["id"] for i in j["records"]], [i["fields"] for i in j["records"]]))).transpose()
return df
def pushToTable(self, table, obj, typecast=False):
h = self.headers
h["Content-type"] = "application/json"
r = requests.post("https://api.airtable.com/v0/"+self.base+"/"+table, headers=h, data=json.dumps({"fields": obj, "typecast": typecast}))
return r.json() | import requests, json
import pandas as pd
class AT:
def __init__(self, base, api):
self.base = base
self.api = api
self.headers = {"Authorization": "Bearer "+self.api}
def getTable(self,table):
r = requests.get("https://api.airtable.com/v0/"+self.base+"/"+table, headers=self.headers)
j = r.json()
df = pd.DataFrame(dict(zip([i["id"] for i in j["records"]], [i["fields"] for i in j["records"]]))).transpose()
return df
def pushToTable(self, table, obj, typecast=False):
h = self.headers
h["Content-type"] = "application/json"
r = requests.post("https://api.airtable.com/v0/"+self.base+"/"+table, headers=h, data=json.dumps({"fields": obj, "typecast": typecast}))
if r.status_code == requests.codes.ok:
return r.json()
else:
print(r.json)
return False | Add quick error messaging for easier debugging | Add quick error messaging for easier debugging
| Python | mit | MeetMangrove/location-bot | ---
+++
@@ -18,4 +18,8 @@
h = self.headers
h["Content-type"] = "application/json"
r = requests.post("https://api.airtable.com/v0/"+self.base+"/"+table, headers=h, data=json.dumps({"fields": obj, "typecast": typecast}))
- return r.json()
+ if r.status_code == requests.codes.ok:
+ return r.json()
+ else:
+ print(r.json)
+ return False |
378b5679f9ca3b814eb0a2a89e9f8045ae4bc4c1 | FunctionHandler.py | FunctionHandler.py | import os, sys
from glob import glob
import GlobalVars
def LoadFunction(path, loadAs=''):
loadType = 'l'
name = path
src = __import__('Functions.' + name, globals(), locals(), [])
if loadAs != '':
name = loadAs
if name in GlobalVars.functions:
loadType = 'rel'
del sys.modules['Functions.'+name]
for f in glob ('Functions/%s.pyc' % name):
os.remove(f)
reload(src)
components = name.split('.')
for comp in components[:1]:
src = getattr(src, comp)
print str(src)
func = src.Instantiate()
GlobalVars.functions.update({name:func})
return loadType
def UnloadFunction(name):
success = True
if name in GlobalVars.functions.keys():
del GlobalVars.functions[name]
else:
success = False
return success
def AutoLoadFunctions():
root = os.path.join('.', 'Functions')
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith('.py'):
continue
try:
if item[:-3] not in GlobalVars.nonDefaultModules:
LoadFunction(item[:-3])
except Exception, x:
print x.args
| import os, sys
from glob import glob
import GlobalVars
def LoadFunction(path, loadAs=''):
loadType = 'l'
name = path
src = __import__('Functions.' + name, globals(), locals(), [])
if loadAs != '':
name = loadAs
if name in GlobalVars.functions:
loadType = 'rel'
del sys.modules['Functions.'+name]
for f in glob ('Functions/%s.pyc' % name):
os.remove(f)
reload(src)
components = name.split('.')
for comp in components[:1]:
src = getattr(src, comp)
ModuleName = str(src).split("from")[0].strip("(").rstrip(" ")
if loadType != 'rel':
print ModuleName + " loaded."
func = src.Instantiate()
GlobalVars.functions.update({name:func})
return loadType
def UnloadFunction(name):
success = True
if name in GlobalVars.functions.keys():
del GlobalVars.functions[name]
else:
success = False
return success
def AutoLoadFunctions():
root = os.path.join('.', 'Functions')
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith('.py'):
continue
try:
if item[:-3] not in GlobalVars.nonDefaultModules:
LoadFunction(item[:-3])
except Exception, x:
print x.args
| Clean up module loading printing | Clean up module loading printing
| Python | mit | HubbeKing/Hubbot_Twisted | ---
+++
@@ -20,7 +20,9 @@
for comp in components[:1]:
src = getattr(src, comp)
- print str(src)
+ ModuleName = str(src).split("from")[0].strip("(").rstrip(" ")
+ if loadType != 'rel':
+ print ModuleName + " loaded."
func = src.Instantiate()
|
47d69320261a3126637229c9deaf02ba425998af | members/models.py | members/models.py | from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
faculty_number = models.CharField(max_length=8)
def __unicode__(self):
return unicode(self.username)
def attended_meetings(self):
return self.protocols.all()
| from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
faculty_number = models.CharField(max_length=8)
def __unicode__(self):
return unicode(self.username)
def attended_meetings(self):
return self.meetings_attend.all()
def absent_meetings(self):
return self.meetings_absent.all()
| Add attended_meetings and absent_meetings methos to User class | Add attended_meetings and absent_meetings methos to User class
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | ---
+++
@@ -9,4 +9,7 @@
return unicode(self.username)
def attended_meetings(self):
- return self.protocols.all()
+ return self.meetings_attend.all()
+
+ def absent_meetings(self):
+ return self.meetings_absent.all() |
b261eb0b2180ebc07ace6c1abad4ec68d6c17840 | app/__init__.py | app/__init__.py | from __future__ import absolute_import
from __future__ import unicode_literals
# Import flask and template operators
from flask import Flask, request, render_template
# Define the WSGI application object
app = Flask(__name__)
# Configurations
app.config.from_object('config.default')
# Configure webhooks
from .webhooks import WebHooks
webhooks = WebHooks(app)
from .gitlab import GitlabWebHook
webhooks.add_handler('gitlab', GitlabWebHook)
# Configure logging
import logging
from logging.handlers import TimedRotatingFileHandler
from logging import Formatter
# Configure the application log
if app.config.get('APPLICATION_LOG', None):
application_log_handler = TimedRotatingFileHandler(app.config.get('APPLICATION_LOG'), 'd', 7)
application_log_handler.setLevel(logging.INFO)
application_log_handler.setFormatter(Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
))
app.logger.addHandler(application_log_handler)
# Sample HTTP error handling
@app.errorhandler(404)
def not_found(error):
return render_template('404.html'), 404
# Import modules
from app.public import views as public_views
from app.hooks import views as hook_views
| from __future__ import absolute_import
from __future__ import unicode_literals
# Import flask and template operators
from flask import Flask, request, render_template
# Define the WSGI application object
app = Flask(__name__)
# Configurations
app.config.from_object('config.default')
# Configure webhooks
from .webhooks import WebHooks
webhooks = WebHooks(app)
from .gitlab import GitlabWebHook
webhooks.add_handler('gitlab', GitlabWebHook)
# Configure Slacker
from slacker import Slacker
slack = Slacker(app.config.get('SLACK_TOKEN'))
# Configure logging
import logging
from logging.handlers import TimedRotatingFileHandler
from logging import Formatter
# Configure the application log
if app.config.get('APPLICATION_LOG', None):
application_log_handler = TimedRotatingFileHandler(app.config.get('APPLICATION_LOG'), 'd', 7)
application_log_handler.setLevel(logging.INFO)
application_log_handler.setFormatter(Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
))
app.logger.addHandler(application_log_handler)
# Sample HTTP error handling
@app.errorhandler(404)
def not_found(error):
return render_template('404.html'), 404
# Import modules
from app.public import views as public_views
from app.hooks import views as hook_views
| Configure slacker on app initialization | Configure slacker on app initialization
| Python | apache-2.0 | pipex/gitbot,pipex/gitbot,pipex/gitbot | ---
+++
@@ -16,6 +16,10 @@
from .gitlab import GitlabWebHook
webhooks.add_handler('gitlab', GitlabWebHook)
+
+# Configure Slacker
+from slacker import Slacker
+slack = Slacker(app.config.get('SLACK_TOKEN'))
# Configure logging
import logging |
2917e089734ace4fd212ef9a16e8adf71d671312 | test/partial_double_test.py | test/partial_double_test.py | from doubles import allow, teardown
class User(object):
def __init__(self, name):
self.name = name
def get_name(self):
return self.name
class TestPartialDouble(object):
def test_stubs_real_object(self):
user = User('Alice')
allow(user).to_receive('get_name').and_return('Bob')
assert user.get_name() == 'Bob'
def test_restores_original(self):
user = User('Alice')
allow(user).to_receive('get_name').and_return('Bob')
teardown()
assert user.get_name() == 'Alice'
| from doubles import allow, teardown
class User(object):
def __init__(self, name, age):
self.name = name
self._age = age
@property
def age(self):
return self._age
def get_name(self):
return self.name
class TestPartialDouble(object):
def test_stubs_real_object(self):
user = User('Alice', 25)
allow(user).to_receive('get_name').and_return('Bob')
assert user.get_name() == 'Bob'
def test_restores_original(self):
user = User('Alice', 25)
allow(user).to_receive('get_name').and_return('Bob')
teardown()
assert user.get_name() == 'Alice'
def test_only_affects_stubbed_method(self):
user = User('Alice', 25)
allow(user).to_receive('get_name').and_return('Bob')
assert user.age == 25
| Test that only stubbed methods are altered on partial doubles. | Test that only stubbed methods are altered on partial doubles.
| Python | mit | uber/doubles | ---
+++
@@ -2,8 +2,13 @@
class User(object):
- def __init__(self, name):
+ def __init__(self, name, age):
self.name = name
+ self._age = age
+
+ @property
+ def age(self):
+ return self._age
def get_name(self):
return self.name
@@ -11,16 +16,23 @@
class TestPartialDouble(object):
def test_stubs_real_object(self):
- user = User('Alice')
+ user = User('Alice', 25)
allow(user).to_receive('get_name').and_return('Bob')
assert user.get_name() == 'Bob'
def test_restores_original(self):
- user = User('Alice')
+ user = User('Alice', 25)
allow(user).to_receive('get_name').and_return('Bob')
teardown()
assert user.get_name() == 'Alice'
+
+ def test_only_affects_stubbed_method(self):
+ user = User('Alice', 25)
+
+ allow(user).to_receive('get_name').and_return('Bob')
+
+ assert user.age == 25 |
efb7191428756f8ef0b85475d00297e2594eca4c | feincms/content/comments/models.py | feincms/content/comments/models.py | # ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
#
# Created by Martin J. Laubach on 08.01.10.
#
# ------------------------------------------------------------------------
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
from django.template import RequestContext
# ------------------------------------------------------------------------
class CommentsContent(models.Model):
comments_enabled = models.BooleanField(_('enabled'), default=True)
class Meta:
abstract = True
verbose_name = _('comments')
verbose_name_plural = _('comments')
def render(self, **kwargs):
parent_type = self.parent.__class__.__name__.lower()
request = kwargs.get('request')
return render_to_string([
'content/comments/%s.html' % parent_type,
'content/comments/default-site.html',
'content/comments/default.html',
], RequestContext(request, { 'content': self, 'parent': self.parent }))
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
| # ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
#
# Created by Martin J. Laubach on 08.01.10.
#
# ------------------------------------------------------------------------
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
from django.template import RequestContext
# ------------------------------------------------------------------------
class CommentsContent(models.Model):
comments_enabled = models.BooleanField(_('enabled'), default=True)
class Meta:
abstract = True
verbose_name = _('comments')
verbose_name_plural = _('comments')
def render(self, **kwargs):
parent_type = self.parent.__class__.__name__.lower()
request = kwargs.get('request')
# TODO: Check for translation extension before use!
comment_page = self.parent.original_translation
f = None
if self.comments_enabled and request.POST:
extra = request._feincms_appcontent_parameters.get('page_extra_path', ())
if len(extra) > 0 and extra[0] == u"post-comment":
from django.contrib.comments.views.comments import post_comment
r = post_comment(request)
if not isinstance(r, HttpResponseRedirect):
f = comments.get_form()(comment_page, data=request.POST)
if f is None:
f = comments.get_form()(comment_page)
return render_to_string([
'content/comments/%s.html' % parent_type,
'content/comments/default-site.html',
'content/comments/default.html',
], RequestContext(request, { 'content': self, 'feincms_page' : self.parent, 'parent': comment_page, 'form' : f }))
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
| Handle posting in comments content type so the "post comment" stays in the cms framework instead of showing an external page from django.contrib.comments. | Handle posting in comments content type so the "post comment" stays in the cms framework instead of showing an external page from django.contrib.comments. | Python | bsd-3-clause | joshuajonah/feincms,feincms/feincms,matthiask/django-content-editor,matthiask/django-content-editor,hgrimelid/feincms,michaelkuty/feincms,nickburlett/feincms,hgrimelid/feincms,mjl/feincms,matthiask/django-content-editor,joshuajonah/feincms,mjl/feincms,joshuajonah/feincms,michaelkuty/feincms,nickburlett/feincms,nickburlett/feincms,matthiask/django-content-editor,feincms/feincms,matthiask/feincms2-content,matthiask/feincms2-content,mjl/feincms,feincms/feincms,michaelkuty/feincms,nickburlett/feincms,pjdelport/feincms,michaelkuty/feincms,pjdelport/feincms,joshuajonah/feincms,hgrimelid/feincms,pjdelport/feincms,matthiask/feincms2-content | ---
+++
@@ -26,11 +26,26 @@
parent_type = self.parent.__class__.__name__.lower()
request = kwargs.get('request')
+ # TODO: Check for translation extension before use!
+ comment_page = self.parent.original_translation
+
+ f = None
+ if self.comments_enabled and request.POST:
+ extra = request._feincms_appcontent_parameters.get('page_extra_path', ())
+ if len(extra) > 0 and extra[0] == u"post-comment":
+ from django.contrib.comments.views.comments import post_comment
+ r = post_comment(request)
+ if not isinstance(r, HttpResponseRedirect):
+ f = comments.get_form()(comment_page, data=request.POST)
+
+ if f is None:
+ f = comments.get_form()(comment_page)
+
return render_to_string([
'content/comments/%s.html' % parent_type,
'content/comments/default-site.html',
'content/comments/default.html',
- ], RequestContext(request, { 'content': self, 'parent': self.parent }))
+ ], RequestContext(request, { 'content': self, 'feincms_page' : self.parent, 'parent': comment_page, 'form' : f }))
# ------------------------------------------------------------------------
|
12cca87c2c84db562361ee230dfc033c31f7e0d4 | webhooks/azuremonitor/setup.py | webhooks/azuremonitor/setup.py | from setuptools import setup, find_packages
version = '5.0.0'
setup(
name="alerta-azure-monitor",
version=version,
description='Alerta webhook for Azure Monitor',
url='https://github.com/alerta/alerta-contrib',
license='MIT',
author='Anton Delitsch',
author_email='anton@trugen.net',
packages=find_packages(),
py_modules=['alerta_azuremonitor'],
install_requires=[
],
include_package_data=True,
zip_safe=True,
entry_points={
'alerta.webhooks': [
'azuremonitor = alerta_azuremonitor:AzureMonitorWebhook'
]
}
)
| from setuptools import setup, find_packages
version = '5.0.1'
setup(
name="alerta-azure-monitor",
version=version,
description='Alerta webhook for Azure Monitor',
url='https://github.com/alerta/alerta-contrib',
license='MIT',
author='Anton Delitsch',
author_email='anton@trugen.net',
packages=find_packages(),
py_modules=['alerta_azuremonitor'],
install_requires=[
'python-dateutil'
],
include_package_data=True,
zip_safe=True,
entry_points={
'alerta.webhooks': [
'azuremonitor = alerta_azuremonitor:AzureMonitorWebhook'
]
}
)
| Add dependency to azure monitor | Add dependency to azure monitor
| Python | mit | alerta/alerta-contrib,alerta/alerta-contrib,alerta/alerta-contrib | ---
+++
@@ -1,6 +1,6 @@
from setuptools import setup, find_packages
-version = '5.0.0'
+version = '5.0.1'
setup(
name="alerta-azure-monitor",
@@ -13,6 +13,7 @@
packages=find_packages(),
py_modules=['alerta_azuremonitor'],
install_requires=[
+ 'python-dateutil'
],
include_package_data=True,
zip_safe=True, |
b37eb87e73f049b87dcd0bf3cd3ff9be1ffaff4b | scripts/run_tests.py | scripts/run_tests.py | #!/usr/bin/env python
import optparse
import sys
from os import path
from os.path import expanduser
import unittest
import argparse
# Simple stand-alone test runner
# - Runs independently of appengine runner
# - So we need to find the GAE library
# - Looks for tests as ./tests/test*.py
# - Use --skipbasics to skip the most basic tests and run only tests/test_graphs*.py
#
# see https://developers.google.com/appengine/docs/python/tools/localunittesting
#
# Alt: python -m unittest discover -s tests/ -p 'test_*.py' (problem as needs GAE files)
def main(sdk_path, test_path, args):
sys.path.insert(0, sdk_path)
import dev_appserver
dev_appserver.fix_sys_path()
print args, test_path
if vars(args)["skipbasics"]:
suite = unittest.loader.TestLoader().discover(test_path, pattern="test_graphs*.py")
else:
suite = unittest.loader.TestLoader().discover(test_path)
unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Configurable testing of schema.org.')
parser.add_argument('--skipbasics', action='store_true', help='Skip basic tests.')
args = parser.parse_args()
main(expanduser("~") + '/google-cloud-sdk/platform/google_appengine/', './tests/', args)
| #!/usr/bin/env python
import argparse
import optparse
from os import getenv, path
from os.path import expanduser
import sys
import unittest
# Simple stand-alone test runner
# - Runs independently of appengine runner
# - So we need to find the GAE library
# - Looks for tests as ./tests/test*.py
# - Use --skipbasics to skip the most basic tests and run only tests/test_graphs*.py
#
# see https://developers.google.com/appengine/docs/python/tools/localunittesting
#
# Alt: python -m unittest discover -s tests/ -p 'test_*.py' (problem as needs GAE files)
def main(sdk_path, test_path, args):
sys.path.insert(0, sdk_path) # add AppEngine SDK to path
import dev_appserver
dev_appserver.fix_sys_path()
print args, test_path
if vars(args)["skipbasics"]:
suite = unittest.loader.TestLoader().discover(test_path, pattern="test_graphs*.py")
else:
suite = unittest.loader.TestLoader().discover(test_path)
unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == '__main__':
sdk_path = getenv('APP_ENGINE',
expanduser("~") + '/google-cloud-sdk/platform/google_appengine/')
parser = argparse.ArgumentParser(description='Configurable testing of schema.org.')
parser.add_argument('--skipbasics', action='store_true', help='Skip basic tests.')
args = parser.parse_args()
main(sdk_path, './tests/', args)
| Check APP_ENGINE env var before using hard-coded path to Google AppEngine SDK. | Check APP_ENGINE env var before using hard-coded path to Google AppEngine SDK.
| Python | apache-2.0 | hschema/schemaorg,schemaorg/schemaorg,pwz3n0/schemaorg,gkellogg/schemaorg,vholland/schemaorg,URXtech/schemaorg,schemaorg/schemaorg,cesarmarinhorj/schemaorg,sdo-sport/schemaorg,hschema/schemaorg,tfrancart/schemaorg,ya7lelkom/schemaorg,ynh/schemaorg,sdo-sport/schemaorg,twamarc/schemaorg,haonature/schemaorg,schemaorg/schemaorg,haonature888/schemaorg,pwz3n0/schemaorg,ya7lelkom/schemaorg,lanthaler/schemaorg,unor/schemaorg,vholland/schemaorg,unor/schemaorg,twamarc/schemaorg,vholland/schemaorg,unor/schemaorg,dbs/schemaorg,twamarc/schemaorg,gkellogg/schemaorg,twcctz500000/schemaorg,ya7lelkom/schemaorg,lanthaler/schemaorg,tfrancart/schemaorg,gkellogg/schemaorg,cesarmarinhorj/schemaorg,vemmaverve/schemaorg,pwz3n0/schemaorg,quantegy/schemaorg,schemaorg/schemaorg,quantegy/schemaorg,haocafes/schemaorg,dbs/schemaorg,lanthaler/schemaorg,gkellogg/schemaorg,tfrancart/schemaorg,haonature/schemaorg,mortizbank/schemaorg,hschema/schemaorg,quantegy/schemaorg,URXtech/schemaorg,haonaturel/schemaorg,ynh/schemaorg,mortizbank/schemaorg,cesarmarinhorj/schemaorg,vemmaverve/schemaorg,haonature888/schemaorg,haocafes/schemaorg,mortizbank/schemaorg,ynh/schemaorg,tfrancart/schemaorg,schemaorg/schemaorg,dbs/schemaorg,sdo-sport/schemaorg,twcctz500000/schemaorg,vholland/schemaorg,dbs/schemaorg,haonaturel/schemaorg | ---
+++
@@ -1,11 +1,11 @@
#!/usr/bin/env python
+import argparse
import optparse
+from os import getenv, path
+from os.path import expanduser
import sys
-from os import path
-from os.path import expanduser
import unittest
-import argparse
# Simple stand-alone test runner
# - Runs independently of appengine runner
@@ -18,7 +18,7 @@
# Alt: python -m unittest discover -s tests/ -p 'test_*.py' (problem as needs GAE files)
def main(sdk_path, test_path, args):
- sys.path.insert(0, sdk_path)
+ sys.path.insert(0, sdk_path) # add AppEngine SDK to path
import dev_appserver
dev_appserver.fix_sys_path()
print args, test_path
@@ -30,10 +30,9 @@
if __name__ == '__main__':
-
-
+ sdk_path = getenv('APP_ENGINE',
+ expanduser("~") + '/google-cloud-sdk/platform/google_appengine/')
parser = argparse.ArgumentParser(description='Configurable testing of schema.org.')
-
parser.add_argument('--skipbasics', action='store_true', help='Skip basic tests.')
args = parser.parse_args()
- main(expanduser("~") + '/google-cloud-sdk/platform/google_appengine/', './tests/', args)
+ main(sdk_path, './tests/', args) |
f86c43e7f1d59aa2b6b8bf636c736cb36da877f9 | google/cloud/forseti/__init__.py | google/cloud/forseti/__init__.py | # Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google CLoud Forseti."""
__version__ = '2.10.0'
__package_name__ = 'forseti-security'
| # Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google CLoud Forseti."""
__version__ = '2.11.0'
__package_name__ = 'forseti-security'
| Change version number to 2.11.0. | Change version number to 2.11.0.
| Python | apache-2.0 | forseti-security/forseti-security,forseti-security/forseti-security,forseti-security/forseti-security,forseti-security/forseti-security | ---
+++
@@ -14,5 +14,5 @@
"""Google CLoud Forseti."""
-__version__ = '2.10.0'
+__version__ = '2.11.0'
__package_name__ = 'forseti-security' |
1a61ba3655e575cbf4d20190182654cb677bce9c | app/grandchallenge/serving/tasks.py | app/grandchallenge/serving/tasks.py | from celery import shared_task
from django.db.models import F
from grandchallenge.serving.models import Download
@shared_task
def create_download(*_, **kwargs):
try:
d = Download.objects.get(**kwargs)
d.count = F("count") + 1
d.save()
except Download.DoesNotExist:
Download.objects.create(**kwargs)
| from celery import shared_task
from django.db.models import F
from grandchallenge.serving.models import Download
@shared_task
def create_download(*_, **kwargs):
d, created = Download.objects.get_or_create(**kwargs)
if not created:
d.count = F("count") + 1
d.save()
| Fix race condition in create_download | Fix race condition in create_download
| Python | apache-2.0 | comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django | ---
+++
@@ -6,9 +6,8 @@
@shared_task
def create_download(*_, **kwargs):
- try:
- d = Download.objects.get(**kwargs)
+ d, created = Download.objects.get_or_create(**kwargs)
+
+ if not created:
d.count = F("count") + 1
d.save()
- except Download.DoesNotExist:
- Download.objects.create(**kwargs) |
094b16a8088c8e7f8012465984c53e87e6e61eac | prompt_toolkit/__init__.py | prompt_toolkit/__init__.py | """
prompt_toolkit
==============
Author: Jonathan Slenders
Description: prompt_toolkit is a Library for building powerful interactive
command lines in Python. It can be a replacement for GNU
readline, but it can be much more than that.
See the examples directory to learn about the usage.
Probably, to get started, you meight also want to have a look at
`prompt_toolkit.shortcuts.prompt`.
"""
from .application import Application
from .shortcuts import *
from .formatted_text import HTML, ANSI
# Don't forget to update in `docs/conf.py`!
__version__ = '2.0.0'
| """
prompt_toolkit
==============
Author: Jonathan Slenders
Description: prompt_toolkit is a Library for building powerful interactive
command lines in Python. It can be a replacement for GNU
readline, but it can be much more than that.
See the examples directory to learn about the usage.
Probably, to get started, you might also want to have a look at
`prompt_toolkit.shortcuts.prompt`.
"""
from .application import Application
from .shortcuts import *
from .formatted_text import HTML, ANSI
# Don't forget to update in `docs/conf.py`!
__version__ = '2.0.0'
| Fix typo: `meight` -> `might` | Fix typo: `meight` -> `might`
| Python | bsd-3-clause | jonathanslenders/python-prompt-toolkit | ---
+++
@@ -10,7 +10,7 @@
See the examples directory to learn about the usage.
-Probably, to get started, you meight also want to have a look at
+Probably, to get started, you might also want to have a look at
`prompt_toolkit.shortcuts.prompt`.
"""
from .application import Application |
8c10f7a3112ecece857ee9c3d20076377f7196a0 | upload.py | upload.py | import os
import re
import datetime
from trovebox import Trovebox
def main():
try:
client = Trovebox()
client.configure(api_version=2)
except IOError, e:
print
print '!! Could not initialize Trovebox connection.'
print '!! Check that ~/.config/trovebox/default exists and contains proper information.'
print
raise e
starttime = datetime.datetime.now()
for root, folders, files in os.walk(os.getcwd()):
folder_name = album = None
for filename in files:
if not re.match(r'^.+\.jpg$', filename, flags=re.IGNORECASE):
continue
if not folder_name:
folder_name = root.split('/')[-1]
album = client.album.create(folder_name)
print 'Entering folder %s' % root
print 'Uploading %s...' % filename
path = '%s/%s' % (root, filename)
client.photo.upload(path, albums=[album.id])
print datetime.datetime.now() - starttime
if __name__ == "__main__":
main()
| import os
import re
import datetime
from trovebox import Trovebox
from trovebox.errors import TroveboxError
def main():
try:
client = Trovebox()
client.configure(api_version=2)
except IOError, e:
print
print '!! Could not initialize Trovebox connection.'
print '!! Check that ~/.config/trovebox/default exists and contains proper information.'
print
raise e
starttime = datetime.datetime.now()
for root, folders, files in os.walk(os.getcwd()):
folder_name = album = None
for filename in files:
# Trovebox supports .jpg, .gif, and .png files
if not re.search(r'\.(jpg|jpeg|gif|png)$', filename, flags=re.IGNORECASE):
continue
if not folder_name:
folder_name = root.split('/')[-1]
try:
album = client.album.create(folder_name)
except TroveboxError, e:
print e.message
print 'Using full path as album name as fallback'
print 'Entering folder %s' % root
print 'Uploading %s...' % filename
path = '%s/%s' % (root, filename)
client.photo.upload(path, albums=[album.id])
print datetime.datetime.now() - starttime
if __name__ == "__main__":
main()
| Use full path as album name fallback. Support all Trovebox file types. | Use full path as album name fallback. Support all Trovebox file types.
| Python | mit | nip3o/trovebox-uploader | ---
+++
@@ -3,6 +3,7 @@
import datetime
from trovebox import Trovebox
+from trovebox.errors import TroveboxError
def main():
try:
@@ -22,12 +23,19 @@
folder_name = album = None
for filename in files:
- if not re.match(r'^.+\.jpg$', filename, flags=re.IGNORECASE):
+ # Trovebox supports .jpg, .gif, and .png files
+ if not re.search(r'\.(jpg|jpeg|gif|png)$', filename, flags=re.IGNORECASE):
continue
if not folder_name:
folder_name = root.split('/')[-1]
- album = client.album.create(folder_name)
+
+ try:
+ album = client.album.create(folder_name)
+ except TroveboxError, e:
+ print e.message
+ print 'Using full path as album name as fallback'
+
print 'Entering folder %s' % root
print 'Uploading %s...' % filename |
b13c8b5cd0dde5d329a14b99c672307567992434 | workshop_drf/todo/serializers.py | workshop_drf/todo/serializers.py | from rest_framework import serializers
from . import models
class Category(serializers.ModelSerializer):
class Meta:
model = models.Category
class Task(serializers.ModelSerializer):
class Meta:
model = models.Task
| from rest_framework import serializers
from django.contrib.auth import get_user_model
from . import models
class Category(serializers.ModelSerializer):
class Meta:
model = models.Category
fields = ('id', 'name')
class Task(serializers.ModelSerializer):
owner = serializers.SlugRelatedField(
slug_field='username',
queryset=get_user_model().objects.all())
categories = serializers.SlugRelatedField(
slug_field='name',
queryset=models.Category.objects.all(),
many=True)
class Meta:
model = models.Task
fields = ('id', 'name', 'owner', 'categories', 'done')
| Add human readable owner & categories. | Add human readable owner & categories.
| Python | mit | arnlaugsson/workshop_drf,xordoquy/workshop_drf_djangoconeu2015,pombredanne/workshop_drf_djangoconeu2015 | ---
+++
@@ -1,13 +1,24 @@
from rest_framework import serializers
+from django.contrib.auth import get_user_model
from . import models
class Category(serializers.ModelSerializer):
class Meta:
model = models.Category
+ fields = ('id', 'name')
class Task(serializers.ModelSerializer):
+ owner = serializers.SlugRelatedField(
+ slug_field='username',
+ queryset=get_user_model().objects.all())
+ categories = serializers.SlugRelatedField(
+ slug_field='name',
+ queryset=models.Category.objects.all(),
+ many=True)
+
class Meta:
model = models.Task
+ fields = ('id', 'name', 'owner', 'categories', 'done')
|
303f7e3e623294f63e53de2a1949a8bfb4a416ab | src/wirecloudcommons/utils/transaction.py | src/wirecloudcommons/utils/transaction.py | from django.db.transaction import is_dirty, leave_transaction_management, rollback, commit, enter_transaction_management, managed
from django.db import DEFAULT_DB_ALIAS
from django.http import HttpResponse
def commit_on_http_success(func, using=None):
"""
This decorator activates db commit on HTTP success response. This way, if the
view function return a success reponse, a commit is made; if the viewfunc
produces an exception or return an error response, a rollback is made.
"""
if using is None:
using = DEFAULT_DB_ALIAS
def wrapped_func(*args, **kwargs):
enter_transaction_management(using=using)
managed(True, using=using)
try:
res = func(*args, **kwargs)
except:
if is_dirty(using=using):
rollback(using=using)
raise
else:
if is_dirty(using=using):
if not isinstance(res, HttpResponse) or res.status_code > 200 or res.status_code < 200:
rollback(using=using)
else:
try:
commit(using=using)
except:
rollback(using=using)
raise
leave_transaction_management(using=using)
return res
return wrapped_func
| from django.db.transaction import is_dirty, leave_transaction_management, rollback, commit, enter_transaction_management, managed
from django.db import DEFAULT_DB_ALIAS
from django.http import HttpResponse
def commit_on_http_success(func, using=None):
"""
This decorator activates db commit on HTTP success response. This way, if the
view function return a success reponse, a commit is made; if the viewfunc
produces an exception or return an error response, a rollback is made.
"""
if using is None:
using = DEFAULT_DB_ALIAS
def wrapped_func(*args, **kwargs):
enter_transaction_management(using=using)
managed(True, using=using)
try:
res = func(*args, **kwargs)
except:
if is_dirty(using=using):
rollback(using=using)
raise
else:
if is_dirty(using=using):
if not isinstance(res, HttpResponse) or res.status_code < 200 or res.status_code >= 400:
rollback(using=using)
else:
try:
commit(using=using)
except:
rollback(using=using)
raise
leave_transaction_management(using=using)
return res
return wrapped_func
| Make commit_on_http_success commit for status codes from 200 to 399 and not only with 200 | Make commit_on_http_success commit for status codes from 200 to 399 and not only with 200
Signed-off-by: Álvaro Arranz García <3a7352a9ec78d7d17a9240a830621a1f159ca041@conwet.com>
| Python | agpl-3.0 | jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud | ---
+++
@@ -25,7 +25,7 @@
else:
if is_dirty(using=using):
- if not isinstance(res, HttpResponse) or res.status_code > 200 or res.status_code < 200:
+ if not isinstance(res, HttpResponse) or res.status_code < 200 or res.status_code >= 400:
rollback(using=using)
else:
try: |
8ddfcf45b4da91a02e12ebff2304e7ecf8a04378 | IPython/utils/importstring.py | IPython/utils/importstring.py | # encoding: utf-8
"""
A simple utility to import something by its string name.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Functions and classes
#-----------------------------------------------------------------------------
def import_item(name):
"""Import and return ``bar`` given the string ``foo.bar``.
Calling ``bar = import_item("foo.bar")`` is the functional equivalent of
executing the code ``from foo import bar``.
Parameters
----------
name : string
The fully qualified name of the module/package being imported.
Returns
-------
mod : module object
The module that was imported.
"""
package = '.'.join(name.split('.')[0:-1])
obj = name.split('.')[-1]
# Note: the original code for this was the following. We've left it
# visible for now in case the new implementation shows any problems down
# the road, to make it easier on anyone looking for a problem. This code
# should be removed once we're comfortable we didn't break anything.
## execString = 'from %s import %s' % (package, obj)
## try:
## exec execString
## except SyntaxError:
## raise ImportError("Invalid class specification: %s" % name)
## exec 'temp = %s' % obj
## return temp
if package:
module = __import__(package,fromlist=[obj])
try:
pak = module.__dict__[obj]
except KeyError:
raise ImportError('No module named %s' % obj)
return pak
else:
return __import__(obj)
| # encoding: utf-8
"""
A simple utility to import something by its string name.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Functions and classes
#-----------------------------------------------------------------------------
def import_item(name):
"""Import and return ``bar`` given the string ``foo.bar``.
Calling ``bar = import_item("foo.bar")`` is the functional equivalent of
executing the code ``from foo import bar``.
Parameters
----------
name : string
The fully qualified name of the module/package being imported.
Returns
-------
mod : module object
The module that was imported.
"""
parts = name.rsplit('.', 1)
if len(parts) == 2:
# called with 'foo.bar....'
package, obj = parts
module = __import__(package, fromlist=[obj])
try:
pak = module.__dict__[obj]
except KeyError:
raise ImportError('No module named %s' % obj)
return pak
else:
# called with un-dotted string
return __import__(parts[0])
| Restructure code to avoid unnecessary list slicing by using rsplit. | Restructure code to avoid unnecessary list slicing by using rsplit.
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | ---
+++
@@ -34,29 +34,17 @@
mod : module object
The module that was imported.
"""
-
- package = '.'.join(name.split('.')[0:-1])
- obj = name.split('.')[-1]
- # Note: the original code for this was the following. We've left it
- # visible for now in case the new implementation shows any problems down
- # the road, to make it easier on anyone looking for a problem. This code
- # should be removed once we're comfortable we didn't break anything.
-
- ## execString = 'from %s import %s' % (package, obj)
- ## try:
- ## exec execString
- ## except SyntaxError:
- ## raise ImportError("Invalid class specification: %s" % name)
- ## exec 'temp = %s' % obj
- ## return temp
-
- if package:
- module = __import__(package,fromlist=[obj])
+ parts = name.rsplit('.', 1)
+ if len(parts) == 2:
+ # called with 'foo.bar....'
+ package, obj = parts
+ module = __import__(package, fromlist=[obj])
try:
pak = module.__dict__[obj]
except KeyError:
raise ImportError('No module named %s' % obj)
return pak
else:
- return __import__(obj)
+ # called with un-dotted string
+ return __import__(parts[0]) |
531974ce5d621b903608aa226110277f77918167 | tools/reset_gids.py | tools/reset_gids.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import types
from sfa.storage.model import *
from sfa.storage.alchemy import *
from sfa.trust.gid import create_uuid
from sfa.trust.hierarchy import Hierarchy
from sfa.util.xrn import Xrn
from sfa.trust.certificate import Certificate, Keypair, convert_public_key
def fix_users():
s=global_dbsession
hierarchy = Hierarchy()
users = s.query(RegRecord).filter_by(type="user")
for record in users:
record.gid = ""
if not record.gid:
uuid = create_uuid()
pkey = Keypair(create=True)
pub_key=getattr(record,'reg_keys',None)
if pub_key is not None:
# use only first key in record
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pub_key = pub_key.key
pkey = convert_public_key(pub_key)
urn = Xrn (xrn=record.hrn, type='user').get_urn()
email=getattr(record,'email',None)
gid_object = hierarchy.create_gid(urn, uuid, pkey, email = email)
gid = gid_object.save_to_string(save_parents=True)
record.gid = gid
s.commit()
if __name__ == '__main__':
fix_users()
| #!/usr/bin/env python
# -*- coding:utf-8 -*-
import types
from sfa.storage.model import *
from sfa.storage.alchemy import *
from sfa.trust.gid import create_uuid
from sfa.trust.hierarchy import Hierarchy
from sfa.util.xrn import Xrn
from sfa.trust.certificate import Certificate, Keypair, convert_public_key
def fix_users():
s=global_dbsession
hierarchy = Hierarchy()
users = s.query(RegRecord).filter_by(type="user")
for record in users:
record.gid = ""
if not record.gid:
uuid = create_uuid()
pkey = Keypair(create=True)
pub_key=getattr(record,'reg_keys',None)
print pub_key
if len(pub_key) > 0:
# use only first key in record
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pub_key = pub_key.key
pkey = convert_public_key(pub_key)
urn = Xrn (xrn=record.hrn, type='user').get_urn()
email=getattr(record,'email',None)
gid_object = hierarchy.create_gid(urn, uuid, pkey, email = email)
gid = gid_object.save_to_string(save_parents=True)
record.gid = gid
s.commit()
if __name__ == '__main__':
fix_users()
| Reset GIDs works even if user has no pub_key | Fix: Reset GIDs works even if user has no pub_key
| Python | mit | yippeecw/sfa,onelab-eu/sfa,onelab-eu/sfa,yippeecw/sfa,onelab-eu/sfa,yippeecw/sfa | ---
+++
@@ -20,7 +20,8 @@
uuid = create_uuid()
pkey = Keypair(create=True)
pub_key=getattr(record,'reg_keys',None)
- if pub_key is not None:
+ print pub_key
+ if len(pub_key) > 0:
# use only first key in record
if pub_key and isinstance(pub_key, types.ListType): pub_key = pub_key[0]
pub_key = pub_key.key |
1a069e7a8565dcd72b362d6b4c0cc3b1b981e5a6 | Streamer/iterMapper.py | Streamer/iterMapper.py | #!/usr/bin/env python
from utils import read_input
from constants import EURISTIC_FACTOR
from collections import Counter
import sys
def choose_nodes(nodes, neighbours_iterable):
neighbours_count = len(neighbours_iterable)
unpacked_list = []
for t in neighbours_iterable:
unpacked_list += t[1:]
c = Counter(unpacked_list)
return tuple(k for k, v in c.items() if v >= neighbours_count * EURISTIC_FACTOR and k not in set(nodes))
def main(separator='\t'):
"""
Choose the next node to be added to the subgraph.
Take as input:
- iterable of nodes (ordered) as key
- iterable of iterable as value
"""
data = read_input(sys.stdin)
for nodes, neighbours_iterable in data:
nodes = eval(nodes)
neighbours_iterable = eval(neighbours_iterable)
next_nodes = choose_nodes(nodes, neighbours_iterable)
for n in next_nodes:
print("{}\t{}".format(sorted(nodes + (n, )), neighbours_iterable))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
from utils import read_input
from constants import EURISTIC_FACTOR
from collections import Counter
import sys
def choose_nodes(nodes, neighbours_iterable):
neighbours_count = len(neighbours_iterable)
unpacked_list = []
for t in neighbours_iterable:
unpacked_list += t[1:]
c = Counter(unpacked_list)
nodes_set = set(nodes)
return tuple(k for k, v in c.items() if v >= neighbours_count * EURISTIC_FACTOR and k not in nodes_set)
def main(separator='\t'):
"""
Choose the next node to be added to the subgraph.
Take as input:
- iterable of nodes (ordered) as key
- iterable of iterable as value
"""
data = read_input(sys.stdin)
for nodes, neighbours_iterable in data:
nodes = eval(nodes)
neighbours_iterable = eval(neighbours_iterable)
next_nodes = choose_nodes(nodes, neighbours_iterable)
for n in next_nodes:
print("{}\t{}".format(sorted(nodes + (n, )), neighbours_iterable))
if __name__ == '__main__':
main()
| Improve choose_nodes method (set problem) | Improve choose_nodes method (set problem)
| Python | mit | AldurD392/SubgraphExplorer,AldurD392/SubgraphExplorer,AldurD392/SubgraphExplorer | ---
+++
@@ -15,7 +15,9 @@
unpacked_list += t[1:]
c = Counter(unpacked_list)
- return tuple(k for k, v in c.items() if v >= neighbours_count * EURISTIC_FACTOR and k not in set(nodes))
+ nodes_set = set(nodes)
+
+ return tuple(k for k, v in c.items() if v >= neighbours_count * EURISTIC_FACTOR and k not in nodes_set)
def main(separator='\t'): |
474d7c95e5fec4a8638f1eb4ff7225f01d116308 | heap.py | heap.py | # -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is a list.
"""
_heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional): A continguous list containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self._heap = initial
elif initial is not None:
raise TypeError(
'Illegal type submitted for heap data; use a list or tuple instead.')
def __unicode__(self):
if not self._heap:
return 'Empty'
return 'Root: %s' % self.__heap[0]
| # -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is a list.
"""
__heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional): A continguous list containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self.__heap = initial
elif initial is not None:
raise TypeError(
'Illegal type submitted for heap data; use a list or tuple instead.')
def __unicode__(self):
if not self.__heap:
return 'Empty'
return 'Root: %s' % self.__heap[0]
| Revert "remove name mangling to make testing easier" | Revert "remove name mangling to make testing easier"
This reverts commit c0647badcab661e0ac6e0499c36868e516dcd2e6.
| Python | mit | DasAllFolks/PyAlgo | ---
+++
@@ -7,7 +7,7 @@
The underlying data structure used to hold the data is a list.
"""
- _heap = []
+ __heap = []
def __init__(self, initial=None):
"""Creates a new heap.
@@ -17,12 +17,12 @@
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
- self._heap = initial
+ self.__heap = initial
elif initial is not None:
raise TypeError(
'Illegal type submitted for heap data; use a list or tuple instead.')
def __unicode__(self):
- if not self._heap:
+ if not self.__heap:
return 'Empty'
return 'Root: %s' % self.__heap[0] |
1d1a64c8a98d98a243307dd58ec3874f0369ce8f | tests/ex12_tests.py | tests/ex12_tests.py | from nose.tools import *
from exercises import ex12
def test_histogram():
'''
Test our histogram output is correct
'''
test_histogram = ex12.histogram([1, 2, 3])
print(test_histogram)
assert_equal(test_histogram, '*\n**\n***\n')
| from nose.tools import *
from exercises import ex12
def test_histogram():
'''
Test our histogram output is correct
'''
test_histogram = ex12.histogram([1, 2, 3])
# assert_equal(test_histogram, '*\n**\n***\n')
| Drop ex12 tests for now. | Drop ex12 tests for now.
| Python | mit | gravyboat/python-exercises | ---
+++
@@ -7,6 +7,5 @@
'''
test_histogram = ex12.histogram([1, 2, 3])
- print(test_histogram)
- assert_equal(test_histogram, '*\n**\n***\n')
+# assert_equal(test_histogram, '*\n**\n***\n')
|
13da665f07be45f5c5b9308d0219250b368810d5 | tests/test_utils.py | tests/test_utils.py | import unittest
from app import create_app, db
from app.utils import get_or_create, is_safe_url
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
self.assertTrue(created1)
self.assertFalse(created2)
self.assertEquals(user1, user2)
def test_is_safe_url(self):
with self.app.test_request_context():
self.assertFalse(is_safe_url("http://externalsite.com"))
self.assertTrue(is_safe_url("http://" + self.app.config["SERVER_NAME"]))
self.assertTrue(is_safe_url("safe_internal_link")) | import unittest
from app import create_app, db
from app.utils import get_or_create, is_safe_url, get_redirect_target
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
self.assertTrue(created1)
self.assertFalse(created2)
self.assertEquals(user1, user2)
def test_is_safe_url(self):
with self.app.test_request_context():
self.assertFalse(is_safe_url("http://externalsite.com"))
self.assertTrue(is_safe_url("http://" + self.app.config["SERVER_NAME"]))
self.assertTrue(is_safe_url("safe_internal_link"))
def test_get_redirect_target(self):
with self.app.test_request_context("/?next=http://externalsite.com"):
self.assertIsNone(get_redirect_target())
with self.app.test_request_context("/?next=safe_internal_link"):
self.assertEquals(get_redirect_target(), "safe_internal_link") | Add unit test for get_redirect_target utility function | Add unit test for get_redirect_target utility function
| Python | mit | Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary | ---
+++
@@ -1,7 +1,7 @@
import unittest
from app import create_app, db
-from app.utils import get_or_create, is_safe_url
+from app.utils import get_or_create, is_safe_url, get_redirect_target
from app.models import User
@@ -31,3 +31,10 @@
self.assertFalse(is_safe_url("http://externalsite.com"))
self.assertTrue(is_safe_url("http://" + self.app.config["SERVER_NAME"]))
self.assertTrue(is_safe_url("safe_internal_link"))
+
+ def test_get_redirect_target(self):
+ with self.app.test_request_context("/?next=http://externalsite.com"):
+ self.assertIsNone(get_redirect_target())
+
+ with self.app.test_request_context("/?next=safe_internal_link"):
+ self.assertEquals(get_redirect_target(), "safe_internal_link") |
7c646414121e68b69896b5f700f65a1963977f72 | tests/test_views.py | tests/test_views.py | from test_plus.test import TestCase
from django_private_chat.views import *
from django.test import RequestFactory
from django.urls import reverse
from django_private_chat.models import *
class TestDialogListView(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.owner_user = self.make_user(username="owuser")
self.oponet_user = self.make_user(username="opuser")
self.dialog = Dialog()
self.dialog.owner = self.owner_user
self.dialog.opponent = self.oponet_user
self.dialog.save()
self.right_dialog = self.dialog
self.dialog = Dialog()
self.dialog.owner = self.make_user(username="user1")
self.dialog.opponent = self.make_user(username="user2")
self.dialog.save()
def test_get_queryset(self):
request = self.factory.get(reverse('dialogs_detail', kwargs={'username':'opuser'}))
request.user = self.owner_user
test_view = DialogListView()
test_view.request = request
queryset = list(test_view.get_queryset())
required_queryset = [self.right_dialog]
self.assertEqual(queryset, required_queryset)
| from test_plus.test import TestCase
from django_private_chat.views import *
from django.test import RequestFactory
from django.urlresolvers import reverse
from django_private_chat.models import *
class TestDialogListView(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.owner_user = self.make_user(username="owuser")
self.oponet_user = self.make_user(username="opuser")
self.dialog = Dialog()
self.dialog.owner = self.owner_user
self.dialog.opponent = self.oponet_user
self.dialog.save()
self.right_dialog = self.dialog
self.dialog = Dialog()
self.dialog.owner = self.make_user(username="user1")
self.dialog.opponent = self.make_user(username="user2")
self.dialog.save()
def test_get_queryset(self):
request = self.factory.get(reverse('dialogs_detail', kwargs={'username':'opuser'}))
request.user = self.owner_user
test_view = DialogListView()
test_view.request = request
queryset = list(test_view.get_queryset())
required_queryset = [self.right_dialog]
self.assertEqual(queryset, required_queryset)
| Fix for older versions of django | Fix for older versions of django | Python | isc | Bearle/django-private-chat,Bearle/django-private-chat,Bearle/django-private-chat | ---
+++
@@ -1,7 +1,7 @@
from test_plus.test import TestCase
from django_private_chat.views import *
from django.test import RequestFactory
-from django.urls import reverse
+from django.urlresolvers import reverse
from django_private_chat.models import *
|
23e984fe24428241b873b93a4ca541b69d3345d2 | nipy/labs/viz_tools/test/test_cm.py | nipy/labs/viz_tools/test/test_cm.py | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
""" Smoke testing the cm module
"""
from nose import SkipTest
try:
import matplotlib as mp
# Make really sure that we don't try to open an Xserver connection.
mp.use('svg', warn=False)
import pylab as pl
pl.switch_backend('svg')
except ImportError:
raise SkipTest('Could not import matplotlib')
from ..cm import dim_cmap, replace_inside
def test_dim_cmap():
# This is only a smoke test
mp.use('svg', warn=False)
import pylab as pl
dim_cmap(pl.cm.jet)
def test_replace_inside():
# This is only a smoke test
mp.use('svg', warn=False)
import pylab as pl
pl.switch_backend('svg')
replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8)
# We also test with gnuplot, which is defined using function
replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8)
| # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
""" Smoke testing the cm module
"""
from nose import SkipTest
try:
import matplotlib as mp
# Make really sure that we don't try to open an Xserver connection.
mp.use('svg', warn=False)
import pylab as pl
pl.switch_backend('svg')
except ImportError:
raise SkipTest('Could not import matplotlib')
from ..cm import dim_cmap, replace_inside
def test_dim_cmap():
# This is only a smoke test
mp.use('svg', warn=False)
import pylab as pl
dim_cmap(pl.cm.jet)
def test_replace_inside():
# This is only a smoke test
mp.use('svg', warn=False)
import pylab as pl
pl.switch_backend('svg')
replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8)
# We also test with gnuplot, which is defined using function
if hasattr(pl.cm, 'gnuplot'):
# gnuplot is only in recent version of MPL
replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8)
| Fix tests on old MPL | BUG: Fix tests on old MPL
Old MPL do not have function-defined colormaps, so the corresponding
code path cannot be tested.
| Python | bsd-3-clause | alexis-roche/nipy,nipy/nipy-labs,arokem/nipy,arokem/nipy,alexis-roche/nipy,alexis-roche/nireg,alexis-roche/register,alexis-roche/niseg,alexis-roche/nipy,bthirion/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/register,arokem/nipy,nipy/nireg,nipy/nireg,bthirion/nipy,alexis-roche/nireg,alexis-roche/niseg,arokem/nipy,alexis-roche/register,bthirion/nipy,nipy/nipy-labs | ---
+++
@@ -30,6 +30,8 @@
pl.switch_backend('svg')
replace_inside(pl.cm.jet, pl.cm.hsv, .2, .8)
# We also test with gnuplot, which is defined using function
- replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8)
+ if hasattr(pl.cm, 'gnuplot'):
+ # gnuplot is only in recent version of MPL
+ replace_inside(pl.cm.gnuplot, pl.cm.gnuplot2, .2, .8)
|
d348c4f7c60b599e713eeeda7ed6806c5b1baae0 | tests/explorers_tests/test_additive_ou.py | tests/explorers_tests/test_additive_ou.py | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
import numpy as np
from chainerrl.explorers.additive_ou import AdditiveOU
class TestAdditiveOU(unittest.TestCase):
def test(self):
action_size = 3
def greedy_action_func():
return np.asarray([0] * action_size, dtype=np.float32)
explorer = AdditiveOU()
for t in range(100):
a = explorer.select_action(t, greedy_action_func)
print(t, a)
| from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
from chainer import testing
import numpy as np
from chainerrl.explorers.additive_ou import AdditiveOU
@testing.parameterize(*testing.product({
'action_size': [1, 3],
'sigma_type': ['scalar', 'ndarray'],
}))
class TestAdditiveOU(unittest.TestCase):
def test(self):
def greedy_action_func():
return np.asarray([0] * self.action_size, dtype=np.float32)
if self.sigma_type == 'scalar':
sigma = np.random.rand()
elif self.sigma_type == 'ndarray':
sigma = np.random.rand(self.action_size)
theta = np.random.rand()
explorer = AdditiveOU(theta=theta, sigma=sigma)
print('theta:', theta, 'sigma', sigma)
for t in range(100):
a = explorer.select_action(t, greedy_action_func)
print(t, a)
| Add tests of non-scalar sigma for AddtiveOU | Add tests of non-scalar sigma for AddtiveOU
| Python | mit | toslunar/chainerrl,toslunar/chainerrl | ---
+++
@@ -6,22 +6,32 @@
standard_library.install_aliases()
import unittest
+from chainer import testing
import numpy as np
from chainerrl.explorers.additive_ou import AdditiveOU
+@testing.parameterize(*testing.product({
+ 'action_size': [1, 3],
+ 'sigma_type': ['scalar', 'ndarray'],
+}))
class TestAdditiveOU(unittest.TestCase):
def test(self):
- action_size = 3
+ def greedy_action_func():
+ return np.asarray([0] * self.action_size, dtype=np.float32)
- def greedy_action_func():
- return np.asarray([0] * action_size, dtype=np.float32)
+ if self.sigma_type == 'scalar':
+ sigma = np.random.rand()
+ elif self.sigma_type == 'ndarray':
+ sigma = np.random.rand(self.action_size)
+ theta = np.random.rand()
- explorer = AdditiveOU()
+ explorer = AdditiveOU(theta=theta, sigma=sigma)
+ print('theta:', theta, 'sigma', sigma)
for t in range(100):
a = explorer.select_action(t, greedy_action_func)
print(t, a) |
0049a5b12b60e0bbd104c7d88d36d432f51a3d37 | cobe/control.py | cobe/control.py | import cmdparse
import commands
import logging
import optparse
import sys
parser = cmdparse.CommandParser()
parser.add_option("-b", "--brain", type="string", default="cobe.brain",
help="Specify an alternate brain file")
parser.add_option("", "--debug", action="store_true",
help=optparse.SUPPRESS_HELP)
parser.add_option("", "--profile", action="store_true",
help=optparse.SUPPRESS_HELP)
parser.add_command(commands.InitCommand(), "Control")
parser.add_command(commands.ConsoleCommand(), "Control")
parser.add_command(commands.LearnCommand(), "Learning")
parser.add_command(commands.LearnIrcLogCommand(), "Learning")
def main():
(command, options, args) = parser.parse_args()
formatter = logging.Formatter("%(levelname)s: %(message)s")
console = logging.StreamHandler()
console.setFormatter(formatter)
logging.root.addHandler(console)
if options.debug:
logging.root.setLevel(logging.DEBUG)
else:
logging.root.setLevel(logging.INFO)
if options.profile:
import cProfile
if command is None:
parser.print_help()
sys.exit(1)
try:
if options.profile:
cProfile.run("command.run(options, args)", "cobe.pstats")
else:
command.run(options, args)
except KeyboardInterrupt:
print
sys.exit(1)
if __name__ == "__main__":
main()
| import cmdparse
import commands
import logging
import optparse
import sys
parser = cmdparse.CommandParser()
parser.add_option("-b", "--brain", type="string", default="cobe.brain",
help="Specify an alternate brain file")
parser.add_option("", "--debug", action="store_true",
help=optparse.SUPPRESS_HELP)
parser.add_command(commands.InitCommand(), "Control")
parser.add_command(commands.ConsoleCommand(), "Control")
parser.add_command(commands.LearnCommand(), "Learning")
parser.add_command(commands.LearnIrcLogCommand(), "Learning")
def main():
(command, options, args) = parser.parse_args()
formatter = logging.Formatter("%(levelname)s: %(message)s")
console = logging.StreamHandler()
console.setFormatter(formatter)
logging.root.addHandler(console)
if options.debug:
logging.root.setLevel(logging.DEBUG)
else:
logging.root.setLevel(logging.INFO)
if command is None:
parser.print_help()
sys.exit(1)
try:
command.run(options, args)
except KeyboardInterrupt:
print
sys.exit(1)
if __name__ == "__main__":
main()
| Remove built-in profiling support; use cProfile from outside | Remove built-in profiling support; use cProfile from outside
| Python | mit | meska/cobe,DarkMio/cobe,LeMagnesium/cobe,meska/cobe,wodim/cobe-ng,LeMagnesium/cobe,wodim/cobe-ng,pteichman/cobe,tiagochiavericosta/cobe,DarkMio/cobe,tiagochiavericosta/cobe,pteichman/cobe | ---
+++
@@ -8,8 +8,6 @@
parser.add_option("-b", "--brain", type="string", default="cobe.brain",
help="Specify an alternate brain file")
parser.add_option("", "--debug", action="store_true",
- help=optparse.SUPPRESS_HELP)
-parser.add_option("", "--profile", action="store_true",
help=optparse.SUPPRESS_HELP)
parser.add_command(commands.InitCommand(), "Control")
@@ -30,18 +28,12 @@
else:
logging.root.setLevel(logging.INFO)
- if options.profile:
- import cProfile
-
if command is None:
parser.print_help()
sys.exit(1)
try:
- if options.profile:
- cProfile.run("command.run(options, args)", "cobe.pstats")
- else:
- command.run(options, args)
+ command.run(options, args)
except KeyboardInterrupt:
print
sys.exit(1) |
37161832aab8ecb611f9a80e1b58fc57866cdc14 | tests/rules/test_git_remote_seturl_add.py | tests/rules/test_git_remote_seturl_add.py | import pytest
from thefuck.rules.git_remote_seturl_add import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='git remote set-url origin url', stderr="fatal: No such remote")])
def test_match(command):
assert match(command)
@pytest.mark.parametrize('command', [
Command('git remote set-url origin url', stderr=""),
Command('git remote add origin url'),
Command('git remote remove origin'),
Command('git remote prune origin'),
Command('git remote set-branches origin branch')
])
def test_not_match(command):
assert not match(command)
@pytest.mark.parametrize('command, new_command', [
(Command('git remote set-url origin git@github.com:nvbn/thefuck.git'),
'git remote add origin git@github.com:nvbn/thefuck.git')])
def test_get_new_command(command, new_command):
assert get_new_command(command) == new_command
| import pytest
from thefuck.rules.git_remote_seturl_add import match, get_new_command
from tests.utils import Command
@pytest.mark.parametrize('command', [
Command(script='git remote set-url origin url', stderr="fatal: No such remote")])
def test_match(command):
assert match(command)
@pytest.mark.parametrize('command', [
Command('git remote set-url origin url', stderr=""),
Command('git remote add origin url'),
Command('git remote remove origin'),
Command('git remote prune origin'),
Command('git remote set-branches origin branch')])
def test_not_match(command):
assert not match(command)
@pytest.mark.parametrize('command, new_command', [
(Command('git remote set-url origin git@github.com:nvbn/thefuck.git'),
'git remote add origin git@github.com:nvbn/thefuck.git')])
def test_get_new_command(command, new_command):
assert get_new_command(command) == new_command
| Fix flake8 errors: E123 closing bracket does not match indentation of opening bracket's line | Fix flake8 errors: E123 closing bracket does not match indentation of opening bracket's line
| Python | mit | Clpsplug/thefuck,SimenB/thefuck,mlk/thefuck,nvbn/thefuck,nvbn/thefuck,scorphus/thefuck,scorphus/thefuck,SimenB/thefuck,Clpsplug/thefuck,mlk/thefuck | ---
+++
@@ -14,8 +14,7 @@
Command('git remote add origin url'),
Command('git remote remove origin'),
Command('git remote prune origin'),
- Command('git remote set-branches origin branch')
- ])
+ Command('git remote set-branches origin branch')])
def test_not_match(command):
assert not match(command)
|
7baac2883aa6abc0f1f458882025ba1d0e9baab2 | app/migrations/versions/4ef20b76cab1_.py | app/migrations/versions/4ef20b76cab1_.py | """Enable PostGIS
Revision ID: 4ef20b76cab1
Revises: 55004b0f00d6
Create Date: 2015-02-11 20:49:42.303864
"""
# revision identifiers, used by Alembic.
revision = '4ef20b76cab1'
down_revision = '55004b0f00d6'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.execute("CREATE EXTENSION postgis;")
op.execute("CREATE EXTENSION postgis_topology;")
def downgrade():
op.execute("DROP EXTENSION postgis_topology;")
op.execute("DROP EXTENSION postgis;")
| """Enable PostGIS
Revision ID: 4ef20b76cab1
Revises: 55004b0f00d6
Create Date: 2015-02-11 20:49:42.303864
"""
# revision identifiers, used by Alembic.
revision = '4ef20b76cab1'
down_revision = '55004b0f00d6'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
op.execute("CREATE EXTENSION IF NOT EXISTS postgis_topology;")
def downgrade():
op.execute("DROP EXTENSION IF EXISTS postgis_topology;")
op.execute("DROP EXTENSION IF EXISTS postgis;")
| Add extra code to PostGIS migration to only create extensions if they're not already there. Drop on rollback only if extensions exist. | Add extra code to PostGIS migration to only create extensions if they're not already there. Drop on rollback only if extensions exist.
| Python | mit | openchattanooga/cpd-zones-old,openchattanooga/cpd-zones-old | ---
+++
@@ -15,9 +15,9 @@
def upgrade():
- op.execute("CREATE EXTENSION postgis;")
- op.execute("CREATE EXTENSION postgis_topology;")
+ op.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
+ op.execute("CREATE EXTENSION IF NOT EXISTS postgis_topology;")
def downgrade():
- op.execute("DROP EXTENSION postgis_topology;")
- op.execute("DROP EXTENSION postgis;")
+ op.execute("DROP EXTENSION IF EXISTS postgis_topology;")
+ op.execute("DROP EXTENSION IF EXISTS postgis;") |
e697743b89f262a179881e2c58e2422a146248d0 | db_cleanup.py | db_cleanup.py | #!/usr/bin/env python
#
# Periodic cleanup job for blog comments.
# This will remove any abandoned comments that
# may have been posted by bots and did not get
# past the captcha.
#
# Use PYTHONPATH=<StackSmash dir to manage.py> ./db_cleanup.py
#
import os, datetime
def clean_up():
# Set Django settings module.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "StackSmash.settings")
# import our blog stuff
from StackSmash.apps.blog.models import Comment, Post
# if comments are not listed and older than a week, delete them.
Comment.objects.filter(listed=False, created__lt = datetime.datetime.now() - datetime.timedelta(days=7)).delete()
# comments = Comment.objects.filter(listed=False).order_by("created")
# If comments are older than a day, delete them.
# for comment in comments:
# if comment.created.day < datetime.datetime.now().day:
# Comment.objects.filter(listed=False, id=comment.id).delete()
if __name__ == "__main__":
clean_up()
| #!/usr/bin/env python
#
# Periodic cleanup job for blog comments.
# This will remove any abandoned comments that
# may have been posted by bots and did not get
# past the captcha.
#
# Use PYTHONPATH=<StackSmash dir to manage.py> ./db_cleanup.py
#
# Cronjob to run on the 12th hour of every day:
# * 12 * * * PYTHONPATH=/StackSmash /StackSmash/db_cleanup.py
#
import os, datetime
def clean_up():
# Set Django settings module.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "StackSmash.settings")
# import our blog stuff
from StackSmash.apps.blog.models import Comment, Post
# if comments are not listed and older than a week, delete them.
Comment.objects.filter(listed=False, created__lt = datetime.datetime.now() - datetime.timedelta(days=7)).delete()
if __name__ == "__main__":
clean_up()
| Add cron information, clean up old cruft that isnt needed. | Add cron information, clean up old cruft that isnt needed.
| Python | bsd-2-clause | Justasic/StackSmash,Justasic/StackSmash | ---
+++
@@ -6,6 +6,9 @@
# past the captcha.
#
# Use PYTHONPATH=<StackSmash dir to manage.py> ./db_cleanup.py
+#
+# Cronjob to run on the 12th hour of every day:
+# * 12 * * * PYTHONPATH=/StackSmash /StackSmash/db_cleanup.py
#
import os, datetime
@@ -20,13 +23,5 @@
# if comments are not listed and older than a week, delete them.
Comment.objects.filter(listed=False, created__lt = datetime.datetime.now() - datetime.timedelta(days=7)).delete()
-# comments = Comment.objects.filter(listed=False).order_by("created")
-
- # If comments are older than a day, delete them.
-# for comment in comments:
-# if comment.created.day < datetime.datetime.now().day:
-# Comment.objects.filter(listed=False, id=comment.id).delete()
-
-
if __name__ == "__main__":
clean_up() |
6e6aaac438a18220db20ad480a8a82af49c44caa | pages/serializers.py | pages/serializers.py | from rest_framework import serializers
from rest_framework.reverse import reverse
from pages import fields, mixins, models
from pages.utils import build_url
class PageSerializer(serializers.HyperlinkedModelSerializer):
name = serializers.CharField()
regions = serializers.SerializerMethodField('rendered_regions')
class Meta:
fields = ('id', 'url', 'name', 'slug', 'regions')
model = models.Page
view_name = 'pages:page-detail'
extra_kwargs = {
'url': {'lookup_field': 'slug'},
}
def rendered_regions(self, obj):
return obj.rendered_regions(self.context['request'])
class JsonPageSerializer(PageSerializer):
def rendered_regions(self, obj):
"""Render regions as a json-serializable dictionary."""
return obj.render_json(self.context.get('request'))
class GroupSerializer(mixins.LinksMixin, serializers.HyperlinkedModelSerializer):
url = fields.AbsoluteURLIdentityField()
pages = serializers.SerializerMethodField('get_pages_link')
links_fields = ['pages']
class Meta:
model = models.Group
def get_pages_link(self, obj):
return build_url(
reverse('pages:page-list', request=self.context.get('request')),
{'group': obj.slug},
)
| from rest_framework import serializers
from rest_framework.reverse import reverse
from pages import fields, mixins, models
from pages.utils import build_url
class PageSerializer(serializers.HyperlinkedModelSerializer):
name = serializers.CharField()
regions = serializers.SerializerMethodField('rendered_regions')
class Meta:
fields = ('id', 'url', 'name', 'slug', 'regions')
model = models.Page
view_name = 'pages:page-detail'
extra_kwargs = {
'url': {'lookup_field': 'slug', 'view_name': 'pages:page-detail'},
}
def rendered_regions(self, obj):
return obj.rendered_regions(self.context['request'])
class JsonPageSerializer(PageSerializer):
def rendered_regions(self, obj):
"""Render regions as a json-serializable dictionary."""
return obj.render_json(self.context.get('request'))
class GroupSerializer(mixins.LinksMixin, serializers.HyperlinkedModelSerializer):
url = fields.AbsoluteURLIdentityField()
pages = serializers.SerializerMethodField('get_pages_link')
links_fields = ['pages']
class Meta:
model = models.Group
def get_pages_link(self, obj):
return build_url(
reverse('pages:page-list', request=self.context.get('request')),
{'group': obj.slug},
)
| Add 'view_name' to url extra kwargs | Add 'view_name' to url extra kwargs
| Python | bsd-2-clause | incuna/feincms-pages-api | ---
+++
@@ -14,7 +14,7 @@
model = models.Page
view_name = 'pages:page-detail'
extra_kwargs = {
- 'url': {'lookup_field': 'slug'},
+ 'url': {'lookup_field': 'slug', 'view_name': 'pages:page-detail'},
}
def rendered_regions(self, obj): |
fb7b9618d5e54e8500efb0904913b4febf80222c | catsnap/batch/image_batch.py | catsnap/batch/image_batch.py | from __future__ import unicode_literals
from catsnap import Client, HASH_KEY
from boto.dynamodb.batch import BatchList
import json
MAX_ITEMS_TO_REQUEST = 99
def get_images(filenames):
if not filenames:
raise StopIteration
filenames = list(filenames)
unprocessed_keys = filenames[MAX_ITEMS_TO_REQUEST:]
filenames = filenames[:MAX_ITEMS_TO_REQUEST]
dynamo = Client().get_dynamodb()
table = Client().table('image')
batch_list = BatchList(dynamo)
batch_list.add_batch(table, filenames,
attributes_to_get=['tags', HASH_KEY])
response = dynamo.batch_get_item(batch_list)
items = response['Responses'][table.name]['Items']
for item in items:
item['filename'] = item.pop(HASH_KEY)
item['tags'] = json.loads(item['tags'])
if response['UnprocessedKeys'] \
and table.name in response['UnprocessedKeys']:
for key in response['UnprocessedKeys'][table.name]['Keys']:
unprocessed_keys.append(key['HashKeyElement'])
for item in items:
yield item
if not unprocessed_keys:
raise StopIteration
for item in get_images(unprocessed_keys):
yield item
| from __future__ import unicode_literals
from catsnap import Client, HASH_KEY
from boto.dynamodb.batch import BatchList
import json
MAX_ITEMS_TO_REQUEST = 99
def get_image_items(filenames):
if not filenames:
raise StopIteration
filenames = list(filenames)
unprocessed_keys = filenames[MAX_ITEMS_TO_REQUEST:]
filenames = filenames[:MAX_ITEMS_TO_REQUEST]
dynamo = Client().get_dynamodb()
table = Client().table('image')
batch_list = BatchList(dynamo)
batch_list.add_batch(table, filenames,
attributes_to_get=['tags', HASH_KEY])
response = dynamo.batch_get_item(batch_list)
items = response['Responses'][table.name]['Items']
if response['UnprocessedKeys'] \
and table.name in response['UnprocessedKeys']:
for key in response['UnprocessedKeys'][table.name]['Keys']:
unprocessed_keys.append(key['HashKeyElement'])
for item in items:
yield item
if not unprocessed_keys:
raise StopIteration
for item in get_image_items(unprocessed_keys):
yield item
def get_images(filenames):
for item in get_image_items(filenames):
yield {'filename': item[HASH_KEY],
'tags': json.loads(item['tags'])}
| Break get_images up to match get_tags | Break get_images up to match get_tags
| Python | mit | ErinCall/catsnap,ErinCall/catsnap,ErinCall/catsnap | ---
+++
@@ -6,7 +6,7 @@
MAX_ITEMS_TO_REQUEST = 99
-def get_images(filenames):
+def get_image_items(filenames):
if not filenames:
raise StopIteration
filenames = list(filenames)
@@ -20,9 +20,6 @@
attributes_to_get=['tags', HASH_KEY])
response = dynamo.batch_get_item(batch_list)
items = response['Responses'][table.name]['Items']
- for item in items:
- item['filename'] = item.pop(HASH_KEY)
- item['tags'] = json.loads(item['tags'])
if response['UnprocessedKeys'] \
and table.name in response['UnprocessedKeys']:
for key in response['UnprocessedKeys'][table.name]['Keys']:
@@ -32,5 +29,10 @@
yield item
if not unprocessed_keys:
raise StopIteration
- for item in get_images(unprocessed_keys):
+ for item in get_image_items(unprocessed_keys):
yield item
+
+def get_images(filenames):
+ for item in get_image_items(filenames):
+ yield {'filename': item[HASH_KEY],
+ 'tags': json.loads(item['tags'])} |
39d370f314431e44e7eb978865be4f7696625eec | scraper/models.py | scraper/models.py | from django.db import models
class Author(models.Model):
name = models.TextField()
def __str__(self):
return self.name
class Paper(models.Model):
url = models.TextField()
title = models.TextField()
citations = models.IntegerField()
abstract = models.TextField()
journal = models.TextField()
volume = models.IntegerField(null=True)
issue = models.IntegerField(null=True)
year = models.IntegerField()
authors = models.ManyToManyField(Author)
class Project(models.Model):
researcher = models.ForeignKey(Author)
title = models.TextField()
url = models.TextField()
funding_amount = models.IntegerField()
year = models.IntegerField()
| from django.db import models
class Author(models.Model):
name = models.TextField()
def __str__(self):
return self.name
class Paper(models.Model):
url = models.TextField()
title = models.TextField()
citations = models.IntegerField()
abstract = models.TextField()
journal = models.TextField()
volume = models.IntegerField(null=True)
issue = models.IntegerField(null=True)
year = models.IntegerField()
authors = models.ManyToManyField(Author)
class Meta:
ordering = ['year', 'title']
class Project(models.Model):
researcher = models.ForeignKey(Author)
title = models.TextField()
url = models.TextField()
funding_amount = models.IntegerField()
year = models.IntegerField()
class Meta:
ordering = ['year', 'title']
| Order entries in table by year, then title | Order entries in table by year, then title
| Python | mit | Spferical/cure-alzheimers-fund-tracker,Spferical/cure-alzheimers-fund-tracker,Spferical/cure-alzheimers-fund-tracker | ---
+++
@@ -18,6 +18,8 @@
issue = models.IntegerField(null=True)
year = models.IntegerField()
authors = models.ManyToManyField(Author)
+ class Meta:
+ ordering = ['year', 'title']
class Project(models.Model):
@@ -26,3 +28,5 @@
url = models.TextField()
funding_amount = models.IntegerField()
year = models.IntegerField()
+ class Meta:
+ ordering = ['year', 'title'] |
9c3c5ede82b6672f23b5aec90cdbadb57ca8b92c | construi/cli.py | construi/cli.py | from .config import parse
from .target import Target
from .__version__ import __version__
from argparse import ArgumentParser
import logging
import os
import sys
def main():
setup_logging()
parser = ArgumentParser(prog='construi', description='Run construi')
parser.add_argument('target', metavar='TARGET', nargs='?')
parser.add_argument('--basedir', metavar='DIR', default=os.getcwd())
parser.add_argument('--version', action='version', version=__version__)
args = parser.parse_args()
config = parse(args.basedir, 'construi.yml')
target = args.target or config.default
Target(config.for_target(target)).run()
def setup_logging():
root_logger = logging.getLogger()
root_logger.addHandler(logging.StreamHandler(sys.stdout))
root_logger.setLevel(logging.INFO)
logging.getLogger("requests").propagate = False
| from .config import parse
from .target import Target
from .__version__ import __version__
from argparse import ArgumentParser
import logging
import os
import sys
def main():
setup_logging()
parser = ArgumentParser(prog='construi', description='Run construi')
parser.add_argument('target', metavar='TARGET', nargs='?')
parser.add_argument('--basedir', metavar='DIR', default=os.getcwd())
parser.add_argument('--version', action='version', version=__version__)
parser.add_argument('-T', '--list-targets', action='store_true')
args = parser.parse_args()
config = load_config(args)
if args.list_targets:
list_targets(config)
target = args.target or config.default
Target(config.for_target(target)).run()
def load_config(args):
return parse(args.basedir, 'construi.yml')
def setup_logging():
root_logger = logging.getLogger()
root_logger.addHandler(logging.StreamHandler(sys.stdout))
root_logger.setLevel(logging.INFO)
logging.getLogger("requests").propagate = False
def list_targets(config):
targets = config.targets.keys()
targets.sort()
for target in targets:
print(target)
sys.exit(0)
| Add -T option to list available targets | Add -T option to list available targets
| Python | apache-2.0 | lstephen/construi | ---
+++
@@ -17,14 +17,22 @@
parser.add_argument('target', metavar='TARGET', nargs='?')
parser.add_argument('--basedir', metavar='DIR', default=os.getcwd())
parser.add_argument('--version', action='version', version=__version__)
+ parser.add_argument('-T', '--list-targets', action='store_true')
args = parser.parse_args()
- config = parse(args.basedir, 'construi.yml')
+ config = load_config(args)
+
+ if args.list_targets:
+ list_targets(config)
target = args.target or config.default
Target(config.for_target(target)).run()
+
+
+def load_config(args):
+ return parse(args.basedir, 'construi.yml')
def setup_logging():
@@ -33,3 +41,14 @@
root_logger.setLevel(logging.INFO)
logging.getLogger("requests").propagate = False
+
+
+def list_targets(config):
+ targets = config.targets.keys()
+
+ targets.sort()
+
+ for target in targets:
+ print(target)
+
+ sys.exit(0) |
a754323facdb05b18d19a1a0365ad12e8c25ed06 | ocradmin/core/tests/test_core.py | ocradmin/core/tests/test_core.py | """
Core tests. Test general environment.
"""
import subprocess as sp
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.conf import settings
class CoreTest(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_isri_tools(self):
"""
Ensure running 'accuracy' with no args results
in usage info. Basically we want to make sure
that the accuracy binary is available.
"""
p = sp.Popen(["accuracy"], stderr=sp.PIPE)
self.assertRegexpMatches(p.communicate()[1], "^Usage")
| """
Core tests. Test general environment.
"""
import subprocess as sp
from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django.conf import settings
class CoreTest(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_isri_tools(self):
"""
Ensure running 'accuracy' with no args results
in usage info. Basically we want to make sure
that the accuracy binary is available.
"""
stdout, stderr = self._run_cmd("accuracy")
self.assertRegexpMatches(stderr, "^Usage")
def test_cuneiform(self):
"""
Ensure cuneiform is available. This is fragile since it depends
on Cuneiform's annoying output on stdout.
"""
stdout, stderr = self._run_cmd("cuneiform")
self.assertRegexpMatches(stdout, "^Cuneiform for Linux")
def test_tesseract(self):
"""
Ensure tesseract is available.
"""
stdout, stderr = self._run_cmd("tesseract")
self.assertRegexpMatches(stderr, "^Usage")
def test_convert(self):
"""
Ensure (Image|Graphics)Magick is available.
"""
stdout, stderr = self._run_cmd("convert")
self.assertRegexpMatches(stdout, "Usage")
def _run_cmd(self, *args):
p = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
return p.communicate()
| Test the presence of various tools | Test the presence of various tools
| Python | apache-2.0 | vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium | ---
+++
@@ -22,7 +22,32 @@
in usage info. Basically we want to make sure
that the accuracy binary is available.
"""
- p = sp.Popen(["accuracy"], stderr=sp.PIPE)
- self.assertRegexpMatches(p.communicate()[1], "^Usage")
+ stdout, stderr = self._run_cmd("accuracy")
+ self.assertRegexpMatches(stderr, "^Usage")
+ def test_cuneiform(self):
+ """
+ Ensure cuneiform is available. This is fragile since it depends
+ on Cuneiform's annoying output on stdout.
+ """
+ stdout, stderr = self._run_cmd("cuneiform")
+ self.assertRegexpMatches(stdout, "^Cuneiform for Linux")
+ def test_tesseract(self):
+ """
+ Ensure tesseract is available.
+ """
+ stdout, stderr = self._run_cmd("tesseract")
+ self.assertRegexpMatches(stderr, "^Usage")
+
+ def test_convert(self):
+ """
+ Ensure (Image|Graphics)Magick is available.
+ """
+ stdout, stderr = self._run_cmd("convert")
+ self.assertRegexpMatches(stdout, "Usage")
+
+ def _run_cmd(self, *args):
+ p = sp.Popen(args, stdout=sp.PIPE, stderr=sp.PIPE)
+ return p.communicate()
+ |
9c5c2f916f8f8fceb38848212d7c4d8883fd2aef | polling_stations/apps/api/mixins.py | polling_stations/apps/api/mixins.py | from rest_framework.decorators import list_route
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
class LargeResultsSetPagination(LimitOffsetPagination):
default_limit = 100
max_limit = 1000
class PollingEntityMixin():
pagination_class = LargeResultsSetPagination
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
if 'council_id' not in request.query_params:
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(
page,
many=True,
read_only=True,
context={'request': request}
)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(
queryset,
many=True,
read_only=True,
context={'request': request}
)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
| from rest_framework.decorators import list_route
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
class LargeResultsSetPagination(LimitOffsetPagination):
default_limit = 100
max_limit = 1000
class PollingEntityMixin():
pagination_class = LargeResultsSetPagination
def output(self, request):
if not self.validate_request():
return Response(
{'detail': 'council_id parameter must be specified'}, 400)
queryset = self.get_queryset()
if 'council_id' not in request.query_params:
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(
page,
many=True,
read_only=True,
context={'request': request}
)
return self.get_paginated_response(serializer.data)
if 'council_id' in request.query_params and (
'station_id' in request.query_params or
'district_id' in request.query_params) and\
len(queryset) == 1:
# If we are requesting a single polling station or district
# return an object instead of an array with length 1
serializer = self.get_serializer(
queryset[0],
many=False,
read_only=True,
context={'request': request}
)
return Response(serializer.data)
serializer = self.get_serializer(
queryset,
many=True,
read_only=True,
context={'request': request}
)
return Response(serializer.data)
def list(self, request, *args, **kwargs):
self.geo = False
return self.output(request)
@list_route(url_path='geo')
def geo(self, request, format=None):
self.geo = True
return self.output(request)
| Return object not array when requesting single district/station | Return object not array when requesting single district/station
If we are requesting a single polling station or district
return an object instead of an array with length 1
| Python | bsd-3-clause | DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | ---
+++
@@ -30,6 +30,20 @@
)
return self.get_paginated_response(serializer.data)
+ if 'council_id' in request.query_params and (
+ 'station_id' in request.query_params or
+ 'district_id' in request.query_params) and\
+ len(queryset) == 1:
+ # If we are requesting a single polling station or district
+ # return an object instead of an array with length 1
+ serializer = self.get_serializer(
+ queryset[0],
+ many=False,
+ read_only=True,
+ context={'request': request}
+ )
+ return Response(serializer.data)
+
serializer = self.get_serializer(
queryset,
many=True, |
3d03959224de39f2c7d491bdac438c08e368fb6c | comics/comics/komistriper.py | comics/comics/komistriper.py | # encoding: utf-8
from comics.aggregator.crawler import NettserierCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Samtull"
language = "no"
url = "https://nettserier.no/aikomi/comic/"
rights = "Emil Åslund"
start_date = "2015-01-14"
class Crawler(NettserierCrawlerBase):
history_capable_date = "2015-01-14"
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
return self.crawl_helper("aikomi", pub_date)
| # encoding: utf-8
from comics.aggregator.crawler import NettserierCrawlerBase
from comics.core.comic_data import ComicDataBase
class ComicData(ComicDataBase):
name = "Samtull"
language = "no"
url = "https://nettserier.no/aikomi/comic/"
rights = "Emil Åslund"
start_date = "2015-01-24"
class Crawler(NettserierCrawlerBase):
history_capable_date = "2015-01-24"
time_zone = "Europe/Oslo"
def crawl(self, pub_date):
return self.crawl_helper("aikomi", pub_date)
| Correct start date for "Samtull" | Correct start date for "Samtull"
| Python | agpl-3.0 | jodal/comics,datagutten/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics | ---
+++
@@ -9,11 +9,11 @@
language = "no"
url = "https://nettserier.no/aikomi/comic/"
rights = "Emil Åslund"
- start_date = "2015-01-14"
+ start_date = "2015-01-24"
class Crawler(NettserierCrawlerBase):
- history_capable_date = "2015-01-14"
+ history_capable_date = "2015-01-24"
time_zone = "Europe/Oslo"
def crawl(self, pub_date): |
dcd39f2955cd80e3888458954a58203ae74dab71 | cyder/base/eav/forms.py | cyder/base/eav/forms.py | from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.constants import ATTRIBUTE_TYPES
from cyder.base.eav.models import Attribute
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'instance' in kwargs and kwargs['instance'] is not None:
# This is a bound form with a real instance
if 'initial' not in kwargs:
kwargs['initial'] = dict()
# Set the attribute field to the name, not the pk
kwargs['initial']['attribute'] = \
kwargs['instance'].attribute.name
# Set the attribute_type field to the current attribute's type
kwargs['initial']['attribute_type'] = \
kwargs['instance'].attribute.attribute_type
super(EAVForm, self).__init__(*args, **kwargs)
attribute_type = forms.ChoiceField(
choices=eav_model._meta.get_field('attribute').type_choices)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
class Meta:
model = eav_model
fields = ('entity', 'attribute_type', 'attribute', 'value')
return EAVForm
| from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.constants import ATTRIBUTE_TYPES
from cyder.base.eav.models import Attribute
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'instance' in kwargs and kwargs['instance'] is not None:
# This is a bound form with a real instance
if 'initial' not in kwargs:
kwargs['initial'] = dict()
# Set the attribute field to the name, not the pk
kwargs['initial']['attribute'] = \
kwargs['instance'].attribute.name
# Set the attribute_type field to the current attribute's type
kwargs['initial']['attribute_type'] = \
kwargs['instance'].attribute.attribute_type
super(EAVForm, self).__init__(*args, **kwargs)
attribute_type = forms.ChoiceField(
choices=eav_model._meta.get_field('attribute').type_choices)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
class Meta:
model = eav_model
fields = ('entity', 'attribute_type', 'attribute', 'value')
EAVForm.__name__ = eav_model.__name__ + 'Form'
return EAVForm
| Set EAV form class name to match EAV model name | Set EAV form class name to match EAV model name
(for easier debugging, at least in theory)
| Python | bsd-3-clause | murrown/cyder,akeym/cyder,OSU-Net/cyder,drkitty/cyder,drkitty/cyder,zeeman/cyder,drkitty/cyder,zeeman/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,murrown/cyder,murrown/cyder,zeeman/cyder,zeeman/cyder,OSU-Net/cyder,OSU-Net/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder | ---
+++
@@ -35,4 +35,6 @@
model = eav_model
fields = ('entity', 'attribute_type', 'attribute', 'value')
+ EAVForm.__name__ = eav_model.__name__ + 'Form'
+
return EAVForm |
43e43e9f342d69bd2b0652d833e204916517efe2 | module_auto_update/migrations/10.0.2.0.0/pre-migrate.py | module_auto_update/migrations/10.0.2.0.0/pre-migrate.py | # -*- coding: utf-8 -*-
# Copyright 2018 Tecnativa - Jairo Llopis
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl).
import logging
from psycopg2 import IntegrityError
from odoo.addons.module_auto_update.models.module_deprecated import \
PARAM_DEPRECATED
_logger = logging.getLogger(__name__)
def migrate(cr, version):
"""Autoenable deprecated behavior."""
try:
cr.execute(
"INSERT INTO ir_config_parameter (key, value) VALUES (%s, '1')",
(PARAM_DEPRECATED,)
)
_logger.warn("Deprecated features have been autoenabled, see "
"addon's README to know how to upgrade to the new "
"supported autoupdate mechanism.")
except IntegrityError:
_logger.info("Deprecated features setting exists, not autoenabling")
| # -*- coding: utf-8 -*-
# Copyright 2018 Tecnativa - Jairo Llopis
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl).
import logging
from psycopg2 import IntegrityError
from odoo.addons.module_auto_update.models.module_deprecated import \
PARAM_DEPRECATED
_logger = logging.getLogger(__name__)
def migrate(cr, version):
"""Autoenable deprecated behavior."""
try:
with cr.savepoint():
cr.execute(
"""INSERT INTO ir_config_parameter (key, value)
VALUES (%s, '1')""",
(PARAM_DEPRECATED,)
)
_logger.warn("Deprecated features have been autoenabled, see "
"addon's README to know how to upgrade to the new "
"supported autoupdate mechanism.")
except IntegrityError:
_logger.info("Deprecated features setting exists, not autoenabling")
| Rollback cursor if param exists | [FIX] module_auto_update: Rollback cursor if param exists
Without this patch, when upgrading after you have stored the deprecated features parameter, the cursor became broken and no more migrations could happen. You got this error:
Traceback (most recent call last):
File "/usr/local/bin/odoo", line 6, in <module>
exec(compile(open(__file__).read(), __file__, 'exec'))
File "/opt/odoo/custom/src/odoo/odoo.py", line 160, in <module>
main()
File "/opt/odoo/custom/src/odoo/odoo.py", line 157, in main
openerp.cli.main()
File "/opt/odoo/custom/src/odoo/openerp/cli/command.py", line 64, in main
o.run(args)
File "/opt/odoo/custom/src/odoo/openerp/cli/shell.py", line 65, in run
self.shell(openerp.tools.config['db_name'])
File "/opt/odoo/custom/src/odoo/openerp/cli/shell.py", line 52, in shell
registry = openerp.modules.registry.RegistryManager.get(dbname)
File "/opt/odoo/custom/src/odoo/openerp/modules/registry.py", line 355, in get
update_module)
File "/opt/odoo/custom/src/odoo/openerp/modules/registry.py", line 386, in new
openerp.modules.load_modules(registry._db, force_demo, status, update_module)
File "/opt/odoo/custom/src/odoo/openerp/modules/loading.py", line 335, in load_modules
force, status, report, loaded_modules, update_module)
File "/opt/odoo/custom/src/odoo/openerp/modules/loading.py", line 239, in load_marked_modules
loaded, processed = load_module_graph(cr, graph, progressdict, report=report, skip_modules=loaded_modules, perform_checks=perform_checks)
File "/opt/odoo/custom/src/odoo/openerp/modules/loading.py", line 136, in load_module_graph
registry.setup_models(cr, partial=True)
File "/opt/odoo/custom/src/odoo/openerp/modules/registry.py", line 186, in setup_models
cr.execute('select model, transient from ir_model where state=%s', ('manual',))
File "/opt/odoo/custom/src/odoo/openerp/sql_db.py", line 154, in wrapper
return f(self, *args, **kwargs)
File "/opt/odoo/custom/src/odoo/openerp/sql_db.py", line 233, in execute
res = self._obj.execute(query, params)
psycopg2.InternalError: current transaction is aborted, commands ignored until end of transaction block
Now you can safely migrate, be that parameter pre-created or not.
| Python | agpl-3.0 | Vauxoo/server-tools,Vauxoo/server-tools,Vauxoo/server-tools | ---
+++
@@ -12,10 +12,12 @@
def migrate(cr, version):
"""Autoenable deprecated behavior."""
try:
- cr.execute(
- "INSERT INTO ir_config_parameter (key, value) VALUES (%s, '1')",
- (PARAM_DEPRECATED,)
- )
+ with cr.savepoint():
+ cr.execute(
+ """INSERT INTO ir_config_parameter (key, value)
+ VALUES (%s, '1')""",
+ (PARAM_DEPRECATED,)
+ )
_logger.warn("Deprecated features have been autoenabled, see "
"addon's README to know how to upgrade to the new "
"supported autoupdate mechanism.") |
f6f3c7a70ff2c47adc2525c0c5868debc7e78fdd | make_a_plea/settings/production.py | make_a_plea/settings/production.py | from .base import *
import os
DEBUG = False
TEMPLATE_DEBUG = DEBUG
GOOGLE_ANALYTICS_ID = "UA-53811587-1"
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['POSTGRES_DB'],
'USER': os.environ['POSTGRES_USER'],
'PASSWORD': os.environ.get('POSTGRES_PASS', ''),
'HOST': os.environ.get('POSTGRES_HOST', ''),
'PORT': os.environ.get('POSTGRES_PORT', ''),
'OPTIONS': {
'sslmode': 'require',
},
}
}
BROKER_TRANSPORT_OPTIONS = {'region': 'eu-west-1',
'queue_name_prefix': 'production-',
'polling_interval': 1,
'visibility_timeout': 3600}
INSTALLED_APPS += ('raven.contrib.django.raven_compat', )
ALLOWED_HOSTS = ["www.makeaplea.justice.gov.uk", "www.service.justice.gov.uk" ]
# Enable CachedStaticFilesStorage for cache-busting assets
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.CachedStaticFilesStorage'
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
STORE_USER_DATA = True
ENCRYPTED_COOKIE_KEYS = [
os.environ["ENCRYPTED_COOKIE_KEY"]
]
| from .base import *
import os
DEBUG = False
TEMPLATE_DEBUG = DEBUG
GOOGLE_ANALYTICS_ID = "UA-53811587-1"
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['POSTGRES_DB'],
'USER': os.environ['POSTGRES_USER'],
'PASSWORD': os.environ.get('POSTGRES_PASS', ''),
'HOST': os.environ.get('POSTGRES_HOST', ''),
'PORT': os.environ.get('POSTGRES_PORT', ''),
'OPTIONS': {
'sslmode': 'require',
},
}
}
BROKER_TRANSPORT_OPTIONS = {'region': 'eu-west-1',
'queue_name_prefix': 'production-',
'polling_interval': 1,
'visibility_timeout': 3600}
INSTALLED_APPS += ('raven.contrib.django.raven_compat', )
ALLOWED_HOSTS = ["www.makeaplea.justice.gov.uk", "www.makeaplea.service.gov.uk"]
# Enable CachedStaticFilesStorage for cache-busting assets
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.CachedStaticFilesStorage'
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
STORE_USER_DATA = True
ENCRYPTED_COOKIE_KEYS = [
os.environ["ENCRYPTED_COOKIE_KEY"]
]
| Update ALLOWED_HOSTS for move to service domain | Update ALLOWED_HOSTS for move to service domain
| Python | mit | ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas | ---
+++
@@ -28,7 +28,7 @@
INSTALLED_APPS += ('raven.contrib.django.raven_compat', )
-ALLOWED_HOSTS = ["www.makeaplea.justice.gov.uk", "www.service.justice.gov.uk" ]
+ALLOWED_HOSTS = ["www.makeaplea.justice.gov.uk", "www.makeaplea.service.gov.uk"]
# Enable CachedStaticFilesStorage for cache-busting assets
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.CachedStaticFilesStorage' |
505772740004ec8c73db49b7772e15d563a27b38 | themint/__init__.py | themint/__init__.py | import os
from flask import Flask
import logging
from raven.contrib.flask import Sentry
from themint.health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
app.logger.info(app.config)
from themint.service import message_service
Health(app, checks=[message_service.health])
| import os
from flask import Flask
import logging
from raven.contrib.flask import Sentry
from themint.health import Health
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
app.logger.debug(app.config)
from themint.service import message_service
Health(app, checks=[message_service.health])
| Set config logging in init to debug | Set config logging in init to debug
| Python | mit | LandRegistry/mint-alpha,LandRegistry/mint-alpha | ---
+++
@@ -18,7 +18,7 @@
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
-app.logger.info(app.config)
+app.logger.debug(app.config)
from themint.service import message_service
Health(app, checks=[message_service.health]) |
82c31412190e42f98ce65d5ad1a6a9b8faad2cb6 | lcd_ticker.py | lcd_ticker.py | #!/usr/bin/env python
"""Display stock quotes on LCD"""
import ystockquote as y
from lcd import lcd_string, tn
symbols = ['AAPL', 'MSFT', 'F', 'T', 'KO', 'GOOG', 'SYK', 'DIS', 'GM', 'GE',
'BAC', 'IBM', 'C', 'AMZN', 'AET', 'DOW', 'INTC', 'PFE', 'MRK',
'RTN']
def compact_quote(symbol):
symbol = 'SYK'
a = y.get_all(symbol)
L52 = int(round(float(a['fifty_two_week_low']), 0))
P = round(float(a['price']), 1)
C = a['change']
H52 = int(round(float(a['fifty_two_week_high']), 0))
PE = round(float(a['price_earnings_ratio']), 1)
Cp = int(round(float(C) / float(P) * 100))
return '{} {} {}% [{} {}] PE {}'.format(symbol, P, Cp, L52, H52, PE)
while(True):
try:
for s in symbols:
lcd_string(compact_quote(s), tn)
except KeyboardInterrupt:
break
| #!/usr/bin/env python
"""Display stock quotes on LCD"""
import ystockquote as y
from lcd import lcd_string, tn
symbols = ['AAPL', 'MSFT', 'F', 'T', 'KO', 'GOOG', 'SYK', 'DIS', 'GM', 'GE',
'BAC', 'IBM', 'C', 'AMZN', 'AET', 'DOW', 'INTC', 'PFE', 'MRK',
'RTN']
def compact_quote(symbol):
a = y.get_all(symbol)
try:
L52 = int(round(float(a['fifty_two_week_low']), 0))
except ValueError:
L52 = '_'
try:
P = round(float(a['price']), 1)
except ValueError:
P = '_'
try:
C = a['change']
except ValueError:
C = '_'
try:
H52 = int(round(float(a['fifty_two_week_high']), 0))
except ValueError:
H52 = '_'
try:
PE = round(float(a['price_earnings_ratio']), 1)
except ValueError:
PE = '_'
try:
Cp = int(round(float(C) / float(P) * 100))
except ValueError:
Cp = '_'
return '{} {} {}% [{} {}] PE {}'.format(symbol, P, Cp, L52, H52, PE)
while(True):
try:
for s in symbols:
lcd_string(compact_quote(s), tn)
except KeyboardInterrupt:
break
| Handle when N/A comes thru the quote. | Handle when N/A comes thru the quote.
| Python | mit | zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie | ---
+++
@@ -10,16 +10,31 @@
'RTN']
def compact_quote(symbol):
- symbol = 'SYK'
a = y.get_all(symbol)
-
- L52 = int(round(float(a['fifty_two_week_low']), 0))
- P = round(float(a['price']), 1)
- C = a['change']
- H52 = int(round(float(a['fifty_two_week_high']), 0))
- PE = round(float(a['price_earnings_ratio']), 1)
- Cp = int(round(float(C) / float(P) * 100))
-
+ try:
+ L52 = int(round(float(a['fifty_two_week_low']), 0))
+ except ValueError:
+ L52 = '_'
+ try:
+ P = round(float(a['price']), 1)
+ except ValueError:
+ P = '_'
+ try:
+ C = a['change']
+ except ValueError:
+ C = '_'
+ try:
+ H52 = int(round(float(a['fifty_two_week_high']), 0))
+ except ValueError:
+ H52 = '_'
+ try:
+ PE = round(float(a['price_earnings_ratio']), 1)
+ except ValueError:
+ PE = '_'
+ try:
+ Cp = int(round(float(C) / float(P) * 100))
+ except ValueError:
+ Cp = '_'
return '{} {} {}% [{} {}] PE {}'.format(symbol, P, Cp, L52, H52, PE)
while(True): |
b9dde5e9fc56feaea581cecca3f919f4e053044d | brumecli/config.py | brumecli/config.py | import os
import yaml
from subprocess import check_output, CalledProcessError
from colors import red
from jinja2 import Template
class Config():
@staticmethod
def load(config_file='brume.yml'):
"""Return the YAML configuration for a project based on the `config_file` template."""
template_functions = {}
def env(key):
"""Return the value of the `key` environment variable."""
try:
return os.environ[key]
except KeyError:
print(red('[ERROR] No environment variable with key {}'.format(key)))
exit(1)
template_functions['env'] = env
if os.path.isdir('.git'):
def git_commit():
"""Return the SHA1 of the latest Git commit (HEAD)."""
try:
return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
def git_branch():
"""Return the name of the current Git branch."""
try:
return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
template_functions['git_commit'] = git_commit()
template_functions['git_branch'] = git_branch()
template = Template(open(config_file, 'r').read())
return yaml.load(
template.render(**template_functions)
)
| import os
import yaml
from subprocess import check_output, CalledProcessError
from colors import red
from jinja2 import Template
class Config():
@staticmethod
def env(key):
"""Return the value of the `key` environment variable."""
try:
return os.environ[key]
except KeyError:
print(red('[ERROR] No environment variable with key {}'.format(key)))
exit(1)
@staticmethod
def git_commit():
"""Return the SHA1 of the latest Git commit (HEAD)."""
try:
return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
@staticmethod
def git_branch():
"""Return the name of the current Git branch."""
try:
return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
except CalledProcessError:
print(red('[ERROR] Current directory is not a Git repository'))
exit(1)
@staticmethod
def load(config_file='brume.yml'):
"""
Return the YAML configuration for a project based on the `config_file` template.
By default, the template exposes the `env` function.
The `git_branch` and `git_commit` values are exposed only when a `.git` folder
exists in the current directory
"""
template_functions = {}
template_functions['env'] = Config.env
if os.path.isdir('.git'):
template_functions['git_commit'] = Config.git_commit()
template_functions['git_branch'] = Config.git_branch()
template = Template(open(config_file, 'r').read())
return yaml.load(
template.render(**template_functions)
)
| Move template functions out of `Config.load()` | Move template functions out of `Config.load()`
| Python | mit | flou/brume,geronimo-iia/brume | ---
+++
@@ -9,37 +9,47 @@
class Config():
@staticmethod
+ def env(key):
+ """Return the value of the `key` environment variable."""
+ try:
+ return os.environ[key]
+ except KeyError:
+ print(red('[ERROR] No environment variable with key {}'.format(key)))
+ exit(1)
+
+ @staticmethod
+ def git_commit():
+ """Return the SHA1 of the latest Git commit (HEAD)."""
+ try:
+ return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
+ except CalledProcessError:
+ print(red('[ERROR] Current directory is not a Git repository'))
+ exit(1)
+
+ @staticmethod
+ def git_branch():
+ """Return the name of the current Git branch."""
+ try:
+ return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
+ except CalledProcessError:
+ print(red('[ERROR] Current directory is not a Git repository'))
+ exit(1)
+
+ @staticmethod
def load(config_file='brume.yml'):
- """Return the YAML configuration for a project based on the `config_file` template."""
+ """
+ Return the YAML configuration for a project based on the `config_file` template.
+
+ By default, the template exposes the `env` function.
+ The `git_branch` and `git_commit` values are exposed only when a `.git` folder
+ exists in the current directory
+ """
template_functions = {}
-
- def env(key):
- """Return the value of the `key` environment variable."""
- try:
- return os.environ[key]
- except KeyError:
- print(red('[ERROR] No environment variable with key {}'.format(key)))
- exit(1)
- template_functions['env'] = env
+ template_functions['env'] = Config.env
if os.path.isdir('.git'):
- def git_commit():
- """Return the SHA1 of the latest Git commit (HEAD)."""
- try:
- return check_output(['git', 'rev-parse', '--short', 'HEAD']).strip()
- except CalledProcessError:
- print(red('[ERROR] Current directory is not a Git repository'))
- exit(1)
-
- def git_branch():
- """Return the name of the current Git branch."""
- try:
- return check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
- except CalledProcessError:
- print(red('[ERROR] Current directory is not a Git repository'))
- exit(1)
- template_functions['git_commit'] = git_commit()
- template_functions['git_branch'] = git_branch()
+ template_functions['git_commit'] = Config.git_commit()
+ template_functions['git_branch'] = Config.git_branch()
template = Template(open(config_file, 'r').read())
return yaml.load( |
fe42da2e9c642c7e4f8b480012e9455ffcb294a0 | openacademy/model/openacademy_course.py | openacademy/model/openacademy_course.py | # -*- coding: utf-8 -*-
from openerp import fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
| # -*- coding: utf-8 -*-
from openerp import api, fields, models
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one # api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
| Modify copy method into inherit | [REF] openacademy: Modify copy method into inherit
| Python | apache-2.0 | deivislaya/openacademy-project | ---
+++
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-from openerp import fields, models
+from openerp import api, fields, models
'''
This module create model of Course
@@ -30,3 +30,19 @@
"The course title must be unique"),
]
+ @api.one # api.one send defaults params: cr, uid, id, context
+ def copy(self, default=None):
+ print "estoy pasando por la funcion heredada de copy en cursos"
+ # default['name'] = self.name ' (copy)'
+
+ copied_count = self.search_count(
+ [('name', '=like', u"Copy of {}%".format(self.name))])
+ if not copied_count:
+ new_name = u"Copy of {}".format(self.name)
+ else:
+ new_name = u"Copy of {} ({})".format(self.name, copied_count)
+
+ default['name'] = new_name
+ return super(Course, self).copy(default)
+
+ |
b27a09e67d737310ec419eb76a39e667316184f0 | userprofile/forms.py | userprofile/forms.py | from datetime import datetime
from django import forms
from news.forms import MaterialFileWidget
from .models import Profile
class ProfileSearchForm(forms.Form):
name = forms.CharField(max_length=200)
class ProfileForm(forms.ModelForm):
image = forms.FileField(required=False, widget=MaterialFileWidget)
def __init__(self, *args, **kwargs):
self.user = kwargs.pop("user", None)
super(ProfileForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super().clean()
has_ongoing_or_future_reservations = self.user.reservations.filter(
end__gt=datetime.now()
).exists()
if cleaned_data["phone_number"] is None and has_ongoing_or_future_reservations:
self.add_error(
"phone_number",
"Du kan ikke fjerne telefonnummer med pågående eller fremtidige reservasjoner",
)
class Meta:
model = Profile
fields = [
"image",
"study",
"show_email",
"social_discord",
"social_steam",
"social_battlenet",
"social_git",
"allergi_gluten",
"allergi_vegetar",
"allergi_vegan",
"allergi_annet",
"limit_social",
"phone_number",
]
| from datetime import datetime, timedelta
from django import forms
from news.forms import MaterialFileWidget
from .models import Profile
class ProfileSearchForm(forms.Form):
name = forms.CharField(max_length=200)
class ProfileForm(forms.ModelForm):
image = forms.FileField(required=False, widget=MaterialFileWidget)
def __init__(self, *args, **kwargs):
self.user = kwargs.pop("user", None)
super(ProfileForm, self).__init__(*args, **kwargs)
def clean(self):
cleaned_data = super().clean()
has_recent_or_future_reservations = self.user.reservations.filter(
end__gt=datetime.now() - timedelta(days=1)
).exists()
if cleaned_data["phone_number"] is None and has_recent_or_future_reservations:
self.add_error(
"phone_number",
"Du kan ikke fjerne telefonnummer med nylig gjennomførte (siste 24 timer), pågående eller fremtidige "
"reservasjoner",
)
class Meta:
model = Profile
fields = [
"image",
"study",
"show_email",
"social_discord",
"social_steam",
"social_battlenet",
"social_git",
"allergi_gluten",
"allergi_vegetar",
"allergi_vegan",
"allergi_annet",
"limit_social",
"phone_number",
]
| Add 24 hour waiting time for removing phone number after reservation | Add 24 hour waiting time for removing phone number after reservation
| Python | mit | hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website | ---
+++
@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timedelta
from django import forms
@@ -20,13 +20,14 @@
def clean(self):
cleaned_data = super().clean()
- has_ongoing_or_future_reservations = self.user.reservations.filter(
- end__gt=datetime.now()
+ has_recent_or_future_reservations = self.user.reservations.filter(
+ end__gt=datetime.now() - timedelta(days=1)
).exists()
- if cleaned_data["phone_number"] is None and has_ongoing_or_future_reservations:
+ if cleaned_data["phone_number"] is None and has_recent_or_future_reservations:
self.add_error(
"phone_number",
- "Du kan ikke fjerne telefonnummer med pågående eller fremtidige reservasjoner",
+ "Du kan ikke fjerne telefonnummer med nylig gjennomførte (siste 24 timer), pågående eller fremtidige "
+ "reservasjoner",
)
class Meta: |
b8a1e049024289a0665c5bff3ecdf60cf3e63825 | typhon/spareice/__init__.py | typhon/spareice/__init__.py | # -*- coding: utf-8 -*-
"""All SPARE-ICE related modules."""
from typhon.spareice.collocations import * # noqa
from typhon.spareice.common import * # noqa
from typhon.spareice.datasets import * # noqa
__all__ = [s for s in dir() if not s.startswith('_')]
| # -*- coding: utf-8 -*-
"""All SPARE-ICE related modules."""
from typhon.spareice.array import *
from typhon.spareice.collocations import * # noqa
from typhon.spareice.common import * # noqa
from typhon.spareice.datasets import * # noqa
__all__ = [s for s in dir() if not s.startswith('_')]
| Add array submodule to standard import | Add array submodule to standard import
| Python | mit | atmtools/typhon,atmtools/typhon | ---
+++
@@ -2,6 +2,7 @@
"""All SPARE-ICE related modules."""
+from typhon.spareice.array import *
from typhon.spareice.collocations import * # noqa
from typhon.spareice.common import * # noqa
from typhon.spareice.datasets import * # noqa |
12b46a902f1596c0559e6e7d3faf6ea7b812a800 | api/radar_api/tests/conftest.py | api/radar_api/tests/conftest.py | import string
import random
import pytest
from radar_api.app import create_app
from radar.database import db
@pytest.fixture(scope='session')
def app():
return create_app({
'TESTING': True,
'SQLALCHEMY_DATABASE_URI': 'postgres://postgres@localhost/radar_test',
'SECRET_KEY': ''.join(random.sample(string.printable, 32)),
'BASE_URL': 'http://localhost'
})
@pytest.yield_fixture(scope='session')
def app_context(app):
with app.app_context() as app_context:
yield app_context
@pytest.fixture(scope='session')
def test_db(request, app_context):
db.drop_all()
db.create_all()
def teardown():
db.drop_all()
request.addfinalizer(teardown)
return db
@pytest.fixture
def transaction(request, app_context, test_db):
db.session.begin_nested()
def teardown():
db.session.rollback()
request.addfinalizer(teardown)
return db
@pytest.yield_fixture
def client(app, app_context):
with app.test_client() as client:
yield client
| import string
import random
import pytest
from radar_api.app import create_app
from radar.database import db
@pytest.fixture(scope='session')
def app():
return create_app({
'TESTING': True,
'SQLALCHEMY_DATABASE_URI': 'postgres://postgres@localhost/radar_test',
'SECRET_KEY': ''.join(random.sample(string.printable, 32)),
'BASE_URL': 'http://localhost',
'UKRDC_PATIENT_SEARCH_URL': 'http://localhost:5101/search',
})
@pytest.yield_fixture(scope='session')
def app_context(app):
with app.app_context() as app_context:
yield app_context
@pytest.fixture(scope='session')
def test_db(request, app_context):
db.drop_all()
db.create_all()
def teardown():
db.drop_all()
request.addfinalizer(teardown)
return db
@pytest.fixture
def transaction(request, app_context, test_db):
db.session.begin_nested()
def teardown():
db.session.rollback()
request.addfinalizer(teardown)
return db
@pytest.yield_fixture
def client(app, app_context):
with app.test_client() as client:
yield client
| Add UKRDC_PATIENT_SEARCH_URL to test app config | Add UKRDC_PATIENT_SEARCH_URL to test app config
| Python | agpl-3.0 | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | ---
+++
@@ -13,7 +13,8 @@
'TESTING': True,
'SQLALCHEMY_DATABASE_URI': 'postgres://postgres@localhost/radar_test',
'SECRET_KEY': ''.join(random.sample(string.printable, 32)),
- 'BASE_URL': 'http://localhost'
+ 'BASE_URL': 'http://localhost',
+ 'UKRDC_PATIENT_SEARCH_URL': 'http://localhost:5101/search',
})
|
7e60f9d7962b3795983fdf5af0605319b1447098 | whack/operations.py | whack/operations.py | import os
from whack.caching import DirectoryCacher, NoCachingStrategy
import whack.builder
def install(package, install_dir, caching, builder_uris, params):
if caching.enabled:
cacher = DirectoryCacher(os.path.expanduser("~/.cache/whack/builds"))
else:
cacher = NoCachingStrategy()
builder = whack.builder.Builders(cacher, builder_uris)
builder.install(package, install_dir, params)
| import os
from whack.caching import HttpCacher, DirectoryCacher, NoCachingStrategy
import whack.builder
def install(package, install_dir, caching, builder_uris, params):
if not caching.enabled:
cacher = NoCachingStrategy()
elif caching.http_cache_url is not None:
# TODO: add DirectoryCacher in front of HttpCacher
cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
else:
cacher = DirectoryCacher(os.path.expanduser("~/.cache/whack/builds"))
builder = whack.builder.Builders(cacher, builder_uris)
builder.install(package, install_dir, params)
| Implement HTTP caching when CLI option is set | Implement HTTP caching when CLI option is set
| Python | bsd-2-clause | mwilliamson/whack | ---
+++
@@ -1,13 +1,16 @@
import os
-from whack.caching import DirectoryCacher, NoCachingStrategy
+from whack.caching import HttpCacher, DirectoryCacher, NoCachingStrategy
import whack.builder
def install(package, install_dir, caching, builder_uris, params):
- if caching.enabled:
+ if not caching.enabled:
+ cacher = NoCachingStrategy()
+ elif caching.http_cache_url is not None:
+ # TODO: add DirectoryCacher in front of HttpCacher
+ cacher = HttpCacher(caching.http_cache_url, caching.http_cache_key)
+ else:
cacher = DirectoryCacher(os.path.expanduser("~/.cache/whack/builds"))
- else:
- cacher = NoCachingStrategy()
builder = whack.builder.Builders(cacher, builder_uris)
builder.install(package, install_dir, params) |
73caeecd963326f4789eb3dc484e59ffb475e12f | blankspot_stats.py | blankspot_stats.py | #! /usr/bin/env python
import MapGardening
import optparse
usage = "usage: %prog [options]"
p = optparse.OptionParser(usage)
p.add_option('--place', '-p',
default="all"
)
options, arguments = p.parse_args()
possible_tables = [
'hist_point',
'hist_point_250m',
'hist_point_500m',
'hist_point_1000m',
'hist_point_proximity',
]
if options.place == "all":
places = MapGardening.get_all_places()
else:
placename = options.place
place = MapGardening.get_place(placename)
places = {placename: place}
MapGardening.init_logging()
for placename in places.keys():
print "printing blankspot info for", placename
MapGardening.init_db(places[placename]['dbname'])
for table in possible_tables:
nodetable = MapGardening.NodeTable(table) # Table may not exist, but object will still be created
nodetable.get_blankspot_stats()
MapGardening.disconnect_db() | #! /usr/bin/env python
"""
Calculate statistics for each study area, and prints results to stdout.
All it prints is the number of blankspots, the number of v1 nodes,
and the number of total nodes. Since I am no longer storing the blankspot
information in the hist_point table itself, these stats are no longer very informative.
If you are looking for statistics for each user, you want user_analysis.py
"""
import MapGardening
import optparse
usage = "usage: %prog [options]"
p = optparse.OptionParser(usage)
p.add_option('--place', '-p',
default="all"
)
options, arguments = p.parse_args()
possible_tables = [
'blankspots_1000_b',
]
if options.place == "all":
places = MapGardening.get_all_places()
else:
placename = options.place
place = MapGardening.get_place(placename)
places = {placename: place}
MapGardening.init_logging()
for placename in places.keys():
print "printing blankspot info for", placename
MapGardening.init_db(places[placename]['dbname'])
for table in possible_tables:
nodetable = MapGardening.NodeTable(table) # Table may not exist, but object will still be created
nodetable.get_blankspot_stats()
MapGardening.disconnect_db() | Add docstring, change tables searched | Add docstring, change tables searched
| Python | mit | almccon/mapgardening,almccon/mapgardening,almccon/mapgardening,almccon/mapgardening | ---
+++
@@ -1,4 +1,13 @@
#! /usr/bin/env python
+"""
+Calculate statistics for each study area, and prints results to stdout.
+
+All it prints is the number of blankspots, the number of v1 nodes,
+and the number of total nodes. Since I am no longer storing the blankspot
+information in the hist_point table itself, these stats are no longer very informative.
+
+If you are looking for statistics for each user, you want user_analysis.py
+"""
import MapGardening
import optparse
@@ -13,11 +22,7 @@
possible_tables = [
- 'hist_point',
- 'hist_point_250m',
- 'hist_point_500m',
- 'hist_point_1000m',
- 'hist_point_proximity',
+ 'blankspots_1000_b',
]
if options.place == "all": |
b97842ecf1c8fa22b599353c1c7fe75fcf482702 | tests/test_utils.py | tests/test_utils.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from modeltrans.manager import (split_translated_fieldname,
transform_translatable_fields)
from modeltrans.utils import build_localized_fieldname
from tests.app.models import Blog
class UtilsTest(TestCase):
def test_split_translated_fieldname(self):
self.assertEquals(
split_translated_fieldname('title_nl'),
('title', 'nl')
)
self.assertEquals(
split_translated_fieldname('full_name_nl'),
('full_name', 'nl')
)
def test_transform_translatable_fields(self):
self.assertEquals(
transform_translatable_fields(Blog, {'title': 'bar', 'title_nl': 'foo'}),
{
'i18n': {
'title_nl': 'foo'
},
'title': 'bar'
}
)
def test_build_localized_fieldname(self):
self.assertEquals(
build_localized_fieldname('title', 'nl'),
'title_nl'
)
self.assertEquals(
build_localized_fieldname('category__name', 'nl'),
'category__name_nl'
)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from modeltrans.manager import transform_translatable_fields
from modeltrans.utils import (build_localized_fieldname,
split_translated_fieldname)
from tests.app.models import Blog
class UtilsTest(TestCase):
def test_split_translated_fieldname(self):
self.assertEquals(
split_translated_fieldname('title_nl'),
('title', 'nl')
)
self.assertEquals(
split_translated_fieldname('full_name_nl'),
('full_name', 'nl')
)
def test_transform_translatable_fields(self):
self.assertEquals(
transform_translatable_fields(Blog, {'title': 'bar', 'title_nl': 'foo'}),
{
'i18n': {
'title_nl': 'foo'
},
'title': 'bar'
}
)
def test_build_localized_fieldname(self):
self.assertEquals(
build_localized_fieldname('title', 'nl'),
'title_nl'
)
self.assertEquals(
build_localized_fieldname('category__name', 'nl'),
'category__name_nl'
)
| Use proper import from utils | Use proper import from utils
| Python | bsd-3-clause | zostera/django-modeltrans,zostera/django-modeltrans | ---
+++
@@ -3,9 +3,9 @@
from django.test import TestCase
-from modeltrans.manager import (split_translated_fieldname,
- transform_translatable_fields)
-from modeltrans.utils import build_localized_fieldname
+from modeltrans.manager import transform_translatable_fields
+from modeltrans.utils import (build_localized_fieldname,
+ split_translated_fieldname)
from tests.app.models import Blog
|
a9d4fab047249fbf5db26385779902d0f7483057 | qsimcirq/__init__.py | qsimcirq/__init__.py | from .qsim_circuit import *
from .qsim_simulator import *
from .qsimh_simulator import *
| from .qsim_circuit import add_op_to_opstring, add_op_to_circuit, QSimCircuit
from .qsim_simulator import QSimSimulatorState, QSimSimulatorTrialResult, QSimSimulator
from .qsimh_simulator import QSimhSimulator
| Replace star imports to fix mypy issue. | Replace star imports to fix mypy issue.
| Python | apache-2.0 | quantumlib/qsim,quantumlib/qsim,quantumlib/qsim,quantumlib/qsim | ---
+++
@@ -1,5 +1,3 @@
-from .qsim_circuit import *
-from .qsim_simulator import *
-from .qsimh_simulator import *
-
-
+from .qsim_circuit import add_op_to_opstring, add_op_to_circuit, QSimCircuit
+from .qsim_simulator import QSimSimulatorState, QSimSimulatorTrialResult, QSimSimulator
+from .qsimh_simulator import QSimhSimulator |
f5592efd0cf780c6e97483a16820f98478be8e3d | devil/devil/android/sdk/version_codes.py | devil/devil/android/sdk/version_codes.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Android SDK version codes.
http://developer.android.com/reference/android/os/Build.VERSION_CODES.html
"""
JELLY_BEAN = 16
JELLY_BEAN_MR1 = 17
JELLY_BEAN_MR2 = 18
KITKAT = 19
KITKAT_WATCH = 20
LOLLIPOP = 21
LOLLIPOP_MR1 = 22
MARSHMALLOW = 23
| # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Android SDK version codes.
http://developer.android.com/reference/android/os/Build.VERSION_CODES.html
"""
JELLY_BEAN = 16
JELLY_BEAN_MR1 = 17
JELLY_BEAN_MR2 = 18
KITKAT = 19
KITKAT_WATCH = 20
LOLLIPOP = 21
LOLLIPOP_MR1 = 22
MARSHMALLOW = 23
NOUGAT = 24
| Add NOUGAT version code constant. | Add NOUGAT version code constant.
Review-Url: https://codereview.chromium.org/2386453002
| Python | bsd-3-clause | sahiljain/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,benschmaus/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult-csm,benschmaus/catapult | ---
+++
@@ -15,4 +15,5 @@
LOLLIPOP = 21
LOLLIPOP_MR1 = 22
MARSHMALLOW = 23
+NOUGAT = 24
|
881a57be20adb82eb7632bcd8282b0971c2793f7 | test/test.py | test/test.py | #!/usr/bin/python
import os
import re
import commands
successRE = re.compile("Hello, GitHub!")
testsPassed = 0
# Add more tests here
tests = [
"bin/hello_c",
"bin/hello_cpp",
"java -cp bin hello",
"python hello.py",
"ruby hello.rb",
"perl hello.pl",
"sh hello.sh",
"expect hello.tcl"
]
def run_test(test):
global testsPassed
output = commands.getoutput(test)
output.rstrip()
if len(output) > 0 and successRE.match(output) != None:
testsPassed += 1
else:
print "\"{0}\" failed({1}). Number of tests executed: {2}".format(test, output, testsPassed)
exit(1)
if __name__ == '__main__':
# Change directory to previous directory where scripts are located
os.chdir ("..")
# Run the tests
for test in tests:
run_test(test)
# Print out the result
print "{0} total tests ran ".format(testsPassed)
print "All tests passed!"
| #!/usr/bin/python
import os
import re
import commands
successRE = re.compile("Hello, GitHub!")
testsPassed = 0
# Add more tests here
tests = [
"bin/hello_c",
"bin/hello_cpp",
"java -cp bin hello",
"python hello.py",
"ruby hello.rb",
"perl hello.pl",
"sh hello.sh",
"tclsh hello.tcl"
]
def run_test(test):
global testsPassed
output = commands.getoutput(test)
output.rstrip()
if len(output) > 0 and successRE.match(output) != None:
testsPassed += 1
else:
print "\"{0}\" failed({1}). Number of tests executed: {2}".format(test, output, testsPassed)
exit(1)
if __name__ == '__main__':
# Change directory to previous directory where scripts are located
os.chdir ("..")
# Run the tests
for test in tests:
run_test(test)
# Print out the result
print "{0} total tests ran ".format(testsPassed)
print "All tests passed!"
| Use tclsh. Old habits die hard :) | Use tclsh. Old habits die hard :)
| Python | mit | luke-ho/hello-github,luke-ho/hello-github,luke-ho/hello-github,luke-ho/hello-github,luke-ho/hello-github,luke-ho/hello-github,luke-ho/hello-github,luke-ho/hello-github,luke-ho/hello-github | ---
+++
@@ -17,7 +17,7 @@
"ruby hello.rb",
"perl hello.pl",
"sh hello.sh",
- "expect hello.tcl"
+ "tclsh hello.tcl"
]
def run_test(test): |
8dbf6f4c581430ae7393d1ed0c5f0b377ffebd7e | doc/examples/plot_match_face_template.py | doc/examples/plot_match_face_template.py | """
=================
Template Matching
=================
In this example, we use template matching to identify the occurrence of an
image patch (in this case, a sub-image centered on the camera man's head).
Since there's only a single match, the maximum value in the `match_template`
result` corresponds to the head location. If you expect multiple matches, you
should use a proper peak-finding function.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data
from skimage.feature import match_template
image = data.camera()
head = image[70:170, 180:280]
result = match_template(image, head)
fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(8, 4))
ax1.imshow(head)
ax1.set_axis_off()
ax1.set_title('template')
ax2.imshow(image)
ax2.set_axis_off()
ax2.set_title('image')
# highlight matched region
xy = np.unravel_index(np.argmax(result), result.shape)[::-1] #-1 flips ij to xy
wface, hface = head.shape
rect = plt.Rectangle(xy, wface, hface, edgecolor='r', facecolor='none')
ax2.add_patch(rect)
plt.show()
| """
=================
Template Matching
=================
In this example, we use template matching to identify the occurrence of an
image patch (in this case, a sub-image centered on the camera man's head).
Since there's only a single match, the maximum value in the `match_template`
result` corresponds to the head location. If you expect multiple matches, you
should use a proper peak-finding function.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data
from skimage.feature import match_template
image = data.camera()
head = image[70:170, 180:280]
result = match_template(image, head)
fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(8, 4))
ax1.imshow(head)
ax1.set_axis_off()
ax1.set_title('template')
ax2.imshow(image)
ax2.set_axis_off()
ax2.set_title('image')
# highlight matched region
xy = np.unravel_index(np.argmax(result), result.shape)[::-1] #-1 flips ij to xy
hface, wface = head.shape
rect = plt.Rectangle(xy, wface, hface, edgecolor='r', facecolor='none')
ax2.add_patch(rect)
plt.show()
| Fix shape unpacking ((height, width), not (w, h)). | Fix shape unpacking ((height, width), not (w, h)).
| Python | bsd-3-clause | rjeli/scikit-image,Midafi/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,michaelpacer/scikit-image,chriscrosscutler/scikit-image,Hiyorimi/scikit-image,almarklein/scikit-image,paalge/scikit-image,emon10005/scikit-image,SamHames/scikit-image,emmanuelle/scikits.image,jwiggins/scikit-image,SamHames/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,youprofit/scikit-image,bennlich/scikit-image,rjeli/scikit-image,Hiyorimi/scikit-image,oew1v07/scikit-image,newville/scikit-image,Midafi/scikit-image,warmspringwinds/scikit-image,jwiggins/scikit-image,keflavich/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,newville/scikit-image,michaelaye/scikit-image,vighneshbirodkar/scikit-image,emmanuelle/scikits.image,dpshelio/scikit-image,SamHames/scikit-image,chriscrosscutler/scikit-image,paalge/scikit-image,michaelaye/scikit-image,emmanuelle/scikits.image,rjeli/scikit-image,robintw/scikit-image,almarklein/scikit-image,Britefury/scikit-image,chintak/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,almarklein/scikit-image,WarrenWeckesser/scikits-image,pratapvardhan/scikit-image,ofgulban/scikit-image,juliusbierk/scikit-image,oew1v07/scikit-image,emon10005/scikit-image,robintw/scikit-image,juliusbierk/scikit-image,blink1073/scikit-image,youprofit/scikit-image,ClinicalGraphics/scikit-image,pratapvardhan/scikit-image,GaZ3ll3/scikit-image,Britefury/scikit-image,bsipocz/scikit-image,ajaybhat/scikit-image,blink1073/scikit-image,emmanuelle/scikits.image,vighneshbirodkar/scikit-image,ClinicalGraphics/scikit-image,bsipocz/scikit-image,chintak/scikit-image,GaZ3ll3/scikit-image,SamHames/scikit-image,michaelpacer/scikit-image,keflavich/scikit-image,bennlich/scikit-image,dpshelio/scikit-image,ofgulban/scikit-image | ---
+++
@@ -33,7 +33,7 @@
# highlight matched region
xy = np.unravel_index(np.argmax(result), result.shape)[::-1] #-1 flips ij to xy
-wface, hface = head.shape
+hface, wface = head.shape
rect = plt.Rectangle(xy, wface, hface, edgecolor='r', facecolor='none')
ax2.add_patch(rect)
|
70863101a882eee0811460cf9bf0f8442d9b0775 | djproxy/urls.py | djproxy/urls.py | from django.conf.urls import patterns
from djproxy.views import HttpProxy
def generate_routes(config):
routes = []
for service_name, proxy_config in config.items():
ProxyClass = type(
'ProxyClass',
(HttpProxy, ),
{'base_url': proxy_config['base_url']}
)
routes.append((
r'^%s' % proxy_config['prefix'],
ProxyClass.as_view()
))
return routes
| import re
from django.conf.urls import patterns, url
from djproxy.views import HttpProxy
def generate_routes(config):
routes = ()
for service_name, proxy_config in config.items():
base_url = proxy_config['base_url']
prefix = proxy_config['prefix']
ProxyClass = type('ProxyClass', (HttpProxy,), {'base_url': base_url})
routes += url(r'^%s' % prefix.lstrip('/'), ProxyClass.as_view()),
return patterns('', *routes)
| Return a `patterns` rather than a list of tuples | Return a `patterns` rather than a list of tuples
| Python | mit | thomasw/djproxy | ---
+++
@@ -1,17 +1,18 @@
-from django.conf.urls import patterns
+import re
+
+from django.conf.urls import patterns, url
+
from djproxy.views import HttpProxy
def generate_routes(config):
- routes = []
+ routes = ()
for service_name, proxy_config in config.items():
- ProxyClass = type(
- 'ProxyClass',
- (HttpProxy, ),
- {'base_url': proxy_config['base_url']}
- )
- routes.append((
- r'^%s' % proxy_config['prefix'],
- ProxyClass.as_view()
- ))
- return routes
+ base_url = proxy_config['base_url']
+ prefix = proxy_config['prefix']
+
+ ProxyClass = type('ProxyClass', (HttpProxy,), {'base_url': base_url})
+
+ routes += url(r'^%s' % prefix.lstrip('/'), ProxyClass.as_view()),
+
+ return patterns('', *routes) |
48ef416352870ae5c695ada006f1855d03d893df | dlexperiment.py | dlexperiment.py | class Experiment(object):
def __init__(self, epochs=1):
self.epochs = epochs
def get_epochs(self):
return self.epochs
def train(self):
raise NotImplementedError
def test(self):
raise NotImplementedError
def set_loss(self):
raise NotImplementedError
def checkpoint(self):
raise NotImplementedError
def save(self):
raise NotImplementedError
def load(self):
raise NotImplementedError
def is_done(self):
raise NotImplementedError
class PyTorchExperiment(object):
def save(self):
pass
| class Experiment(object):
def __init__(self, model, optimizer, train_data, test_data, epochs=1):
self.model = model
self.optimizer = optimizer
self.train_data = train_data
self.test_data = test_data
self.epochs = epochs
self.loss = 0
self.current_epoch = 0
def get_epoch(self):
return self.current_epoch
def train(self):
raise NotImplementedError
def test(self):
raise NotImplementedError
def set_loss(self, loss):
self.loss = loss
def checkpoint(self):
raise NotImplementedError
def save(self):
raise NotImplementedError
def load(self):
raise NotImplementedError
def is_done(self):
raise NotImplementedError
class PyTorchExperiment(object):
def save(self):
pass
| Add necessary params to Experiment. | Add necessary params to Experiment.
| Python | apache-2.0 | sagelywizard/dlex | ---
+++
@@ -1,9 +1,15 @@
class Experiment(object):
- def __init__(self, epochs=1):
+ def __init__(self, model, optimizer, train_data, test_data, epochs=1):
+ self.model = model
+ self.optimizer = optimizer
+ self.train_data = train_data
+ self.test_data = test_data
self.epochs = epochs
+ self.loss = 0
+ self.current_epoch = 0
- def get_epochs(self):
- return self.epochs
+ def get_epoch(self):
+ return self.current_epoch
def train(self):
raise NotImplementedError
@@ -11,8 +17,8 @@
def test(self):
raise NotImplementedError
- def set_loss(self):
- raise NotImplementedError
+ def set_loss(self, loss):
+ self.loss = loss
def checkpoint(self):
raise NotImplementedError |
05e8170326c5aa2be48eee5f90ab5a3919775e01 | io_EDM/__init__.py | io_EDM/__init__.py |
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,0,1),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
bpy.utils.register_module(__name__)
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass |
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,0,1),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass | Remove potential duplicate registration code | Remove potential duplicate registration code
Was sometimes causing an error when importing the project
| Python | mit | ndevenish/Blender_ioEDM,ndevenish/Blender_ioEDM | ---
+++
@@ -19,8 +19,7 @@
rna_register()
panels_register()
importer_register()
- bpy.utils.register_module(__name__)
-
+
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
@@ -28,7 +27,6 @@
importer_unregister()
panels_unregister()
rna_unregister()
- bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register() |
f8fe7041d209bb83e8483180824ffa73ceaa5f52 | ckanny/__init__.py | ckanny/__init__.py | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, package, hdx
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
__version__ = '0.17.0'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
manager.merge(package.manager, namespace='pk')
@manager.command
def ver():
"""Show ckanny version"""
print('v%s' % __version__)
if __name__ == '__main__':
manager.main()
| # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, package, hdx
__version__ = '0.17.0'
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
manager.merge(package.manager, namespace='pk')
@manager.command
def ver():
"""Show ckanny version"""
print('v%s' % __version__)
if __name__ == '__main__':
manager.main()
| Move version num to own line | Move version num to own line | Python | mit | reubano/ckanny,reubano/ckanny | ---
+++
@@ -23,11 +23,12 @@
from manager import Manager
from . import datastorer, filestorer, package, hdx
+__version__ = '0.17.0'
+
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
-__version__ = '0.17.0'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
|
899cf1aa2bc274602a7f9b2ef315ed67239f955a | examples/inprocess/embedded_qtconsole.py | examples/inprocess/embedded_qtconsole.py | import os
from IPython.qt.console.qtconsoleapp import IPythonQtConsoleApp
from IPython.qt.console.rich_ipython_widget import RichIPythonWidget
from IPython.qt.inprocess import QtInProcessKernelManager
from IPython.lib import guisupport
from IPython.utils import path
def print_process_id():
print 'Process ID is:', os.getpid()
def main():
# Print the ID of the main process
print_process_id()
app = guisupport.get_app_qt4()
# Create an in-process kernel
# >>> print_process_id()
# will print the same process ID as the main process
kernel_manager = QtInProcessKernelManager()
kernel_manager.start_kernel()
kernel = kernel_manager.kernel
kernel.gui = 'qt4'
kernel.shell.push({'foo': 43, 'print_process_id': print_process_id})
kernel_client = kernel_manager.client()
kernel_client.start_channels()
def stop():
kernel_client.stop_channels()
kernel_manager.shutdown_kernel()
app.exit()
control = RichIPythonWidget()
control.kernel_manager = kernel_manager
control.kernel_client = kernel_client
control.exit_requested.connect(stop)
iapp = IPythonQtConsoleApp.instance()
iapp.config_file_paths = [path.locate_profile("default")]
iapp.config_files = ["ipython_qtconsole_config.py"]
iapp.load_config_file()
for cls_name, d in iapp.config.items():
for k, v in d.items():
setattr(control, k, v)
iapp.init_colors(control)
control.show()
guisupport.start_event_loop_qt4(app)
if __name__ == '__main__':
main()
| import os
from IPython.qt.console.rich_ipython_widget import RichIPythonWidget
from IPython.qt.inprocess import QtInProcessKernelManager
from IPython.lib import guisupport
def print_process_id():
print 'Process ID is:', os.getpid()
def main():
# Print the ID of the main process
print_process_id()
app = guisupport.get_app_qt4()
# Create an in-process kernel
# >>> print_process_id()
# will print the same process ID as the main process
kernel_manager = QtInProcessKernelManager()
kernel_manager.start_kernel()
kernel = kernel_manager.kernel
kernel.gui = 'qt4'
kernel.shell.push({'foo': 43, 'print_process_id': print_process_id})
kernel_client = kernel_manager.client()
kernel_client.start_channels()
def stop():
kernel_client.stop_channels()
kernel_manager.shutdown_kernel()
app.exit()
control = RichIPythonWidget()
control.kernel_manager = kernel_manager
control.kernel_client = kernel_client
control.exit_requested.connect(stop)
control.show()
guisupport.start_event_loop_qt4(app)
if __name__ == '__main__':
main()
| Revert config-loading change in embedding example. | Revert config-loading change in embedding example.
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | ---
+++
@@ -1,10 +1,8 @@
import os
-from IPython.qt.console.qtconsoleapp import IPythonQtConsoleApp
from IPython.qt.console.rich_ipython_widget import RichIPythonWidget
from IPython.qt.inprocess import QtInProcessKernelManager
from IPython.lib import guisupport
-from IPython.utils import path
def print_process_id():
@@ -38,17 +36,8 @@
control.kernel_manager = kernel_manager
control.kernel_client = kernel_client
control.exit_requested.connect(stop)
+ control.show()
- iapp = IPythonQtConsoleApp.instance()
- iapp.config_file_paths = [path.locate_profile("default")]
- iapp.config_files = ["ipython_qtconsole_config.py"]
- iapp.load_config_file()
- for cls_name, d in iapp.config.items():
- for k, v in d.items():
- setattr(control, k, v)
- iapp.init_colors(control)
-
- control.show()
guisupport.start_event_loop_qt4(app)
|
ae324434fb00a46eae45d8218954950947bd636c | test_board_pytest.py | test_board_pytest.py | from board import Board
def test_constructor():
board = Board(0,0)
assert board.boardMatrix.size == 0
assert board.columns == 0
assert board.rows == 0
board = Board(5,5)
assert board.boardMatrix.size == 25
assert board.columns == 5
assert board.rows == 5
def test_addPiece():
board = Board(5,5)
assert board.addPiece(0, 1) == True
assert board.boardMatrix.item((4,0)) == 1
assert board.addPiece(0, 1) == True
assert board.boardMatrix.item((3,0)) == 1
assert board.addPiece(1, 1) == True
assert board.boardMatrix.item((4,1)) == 1
assert board.addPiece(4, 1) == True
assert board.boardMatrix.item((4,4)) == 1
| from board import Board
def test_constructor():
board = Board(0,0)
assert board.boardMatrix.size == 0
assert board.columns == 0
assert board.rows == 0
board = Board(5,5)
assert board.boardMatrix.size == 25
assert board.columns == 5
assert board.rows == 5
def test_addPiece():
board = Board(5,5)
assert board.addPiece(0, 1) == True
assert board.boardMatrix.item((4,0)) == 1
assert board.addPiece(0, 1) == True
assert board.boardMatrix.item((3,0)) == 1
assert board.addPiece(1, 1) == True
assert board.boardMatrix.item((4,1)) == 1
assert board.addPiece(4, 1) == True
assert board.boardMatrix.item((4,4)) == 1
"""
Tests that the board can be filled up completely
but no more.
"""
def test_addPieceMaxColumn():
board = Board(5,5)
# Fill board
for x in range(board.columns):
for y in range(board.rows):
assert board.addPiece(x, 1) == True
# Attempt to overfill
for x in range(board.columns):
assert board.addPiece(x, 2) == False
# Make sure initially filled values weren't overriden
for x in range(board.columns):
for y in range(board.rows):
assert board.boardMatrix.item((x,y)) == 1
| Add test checking board can't get overfilled. | Add test checking board can't get overfilled.
| Python | mit | isaacarvestad/four-in-a-row | ---
+++
@@ -24,5 +24,24 @@
assert board.addPiece(4, 1) == True
assert board.boardMatrix.item((4,4)) == 1
-
+"""
+Tests that the board can be filled up completely
+but no more.
+"""
+def test_addPieceMaxColumn():
+ board = Board(5,5)
+
+ # Fill board
+ for x in range(board.columns):
+ for y in range(board.rows):
+ assert board.addPiece(x, 1) == True
+
+ # Attempt to overfill
+ for x in range(board.columns):
+ assert board.addPiece(x, 2) == False
+
+ # Make sure initially filled values weren't overriden
+ for x in range(board.columns):
+ for y in range(board.rows):
+ assert board.boardMatrix.item((x,y)) == 1 |
9437b7fa2ef7f581968d6628561940dcb1e3f4ad | test_tws/__init__.py | test_tws/__init__.py | '''Unit test package for package "tws".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import socket
from tws import EWrapper
def test_import():
'''Verify successful import of top-level "tws" package'''
import tws
assert tws
class mock_wrapper(EWrapper):
def __init__(self):
self.errors = []
def error(self, id, code, text):
self.errors.append((id, code, text))
class mock_socket(object):
def __init__(self):
self._peer = ()
def connect(self, peer, error=False):
if error: raise socket.error()
self._peer = peer
def getpeername(self):
if not self._peer: raise socket.error()
return self._peer
def makefile(self, mode):
return StringIO()
| '''Unit test package for package "tws".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import socket
from tws import EWrapper
def test_import():
'''Verify successful import of top-level "tws" package'''
import tws
assert tws
class mock_wrapper(EWrapper):
def __init__(self):
self.calldata = []
self.errors = []
def error(self, id, code, text):
self.errors.append((id, code, text))
def __getattr__(self, name):
# Any arbitrary unknown attribute is mapped to a function call which is
# recorded into self.calldata.
return lambda *args, **kwds: self.calldata.append((name, args, kwds))
class mock_socket(object):
def __init__(self):
self._peer = ()
def connect(self, peer, error=False):
if error: raise socket.error()
self._peer = peer
def getpeername(self):
if not self._peer: raise socket.error()
return self._peer
def makefile(self, mode):
return StringIO()
| Implement a __getattr__() for mock_wrapper that just returns a lambda that records whatever call was attempted along with the call params. | Implement a __getattr__() for mock_wrapper that just returns a lambda that records whatever call was attempted along with the call params. | Python | bsd-3-clause | kbluck/pytws,kbluck/pytws | ---
+++
@@ -16,11 +16,18 @@
class mock_wrapper(EWrapper):
def __init__(self):
+ self.calldata = []
self.errors = []
def error(self, id, code, text):
self.errors.append((id, code, text))
-
+
+ def __getattr__(self, name):
+ # Any arbitrary unknown attribute is mapped to a function call which is
+ # recorded into self.calldata.
+ return lambda *args, **kwds: self.calldata.append((name, args, kwds))
+
+
class mock_socket(object):
def __init__(self): |
87c6a222c7e979c2e44ecf152158bfcbe3b61d2a | calaccess_processed/managers.py | calaccess_processed/managers.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Custom managers for working with CAL-ACCESS processed data models.
"""
from __future__ import unicode_literals
import os
from django.db import models, connection
class ProcessedDataManager(models.Manager):
"""
Utilities for loading raw CAL-ACCESS data into processed data models.
"""
def load_raw_data(self):
with connection.cursor() as c:
c.execute(self.raw_data_load_query)
@property
def raw_data_load_query_path(self):
return os.path.join(
'calaccess_processed',
'sql',
'load_%s_model.sql' % self.model._meta.model_name,
)
@property
def has_raw_data_load_query(self):
if os.path.exists(self.raw_data_load_query_path):
return True
else:
return False
@property
def raw_data_load_query(self):
sql = ''
if self.has_raw_data_load_query:
with open(self.raw_data_load_query_path) as f:
sql = f.read()
return sql
@property
def db_table(self):
"""
Return the model's database table name as a string.
"""
return self._meta.db_table
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Custom managers for working with CAL-ACCESS processed data models.
"""
from __future__ import unicode_literals
import os
from django.db import models, connection
class ProcessedDataManager(models.Manager):
"""
Utilities for loading raw CAL-ACCESS data into processed data models.
"""
def load_raw_data(self):
with connection.cursor() as c:
c.execute(self.raw_data_load_query)
@property
def raw_data_load_query_path(self):
return os.path.join(
os.path.dirname(__file__),
'sql',
'load_%s_model.sql' % self.model._meta.model_name,
)
@property
def has_raw_data_load_query(self):
if os.path.exists(self.raw_data_load_query_path):
return True
else:
return False
@property
def raw_data_load_query(self):
sql = ''
if self.has_raw_data_load_query:
with open(self.raw_data_load_query_path) as f:
sql = f.read()
return sql
@property
def db_table(self):
"""
Return the model's database table name as a string.
"""
return self._meta.db_table
| Fix path to .sql files | Fix path to .sql files
| Python | mit | california-civic-data-coalition/django-calaccess-processed-data,california-civic-data-coalition/django-calaccess-processed-data | ---
+++
@@ -18,7 +18,7 @@
@property
def raw_data_load_query_path(self):
return os.path.join(
- 'calaccess_processed',
+ os.path.dirname(__file__),
'sql',
'load_%s_model.sql' % self.model._meta.model_name,
) |
334b3e1bbda58439020131fe178db1e72cbf662a | 2/Solution.py | 2/Solution.py | from ListNode import *
class Solution():
def addTwoNumbers(self, l1, l2):
current_node = ListNode(None)
head_node = current_node
carry = 0
p = l1
q = l2
while p or q or carry:
x = y = 0
if p is not None:
x = p.val
p = p.next
if q is not None:
y = q.val
q = q.next
sum = x + y + carry
sum, carry = sum % 10, int(sum / 10)
current_node.next = ListNode(sum)
current_node = current_node.next
return head_node.next
def buildTree(nums):
node = ListNode(nums[0])
node.next = ListNode(nums[1])
node.next.next = ListNode(nums[2])
return node
def printTree(node):
print(node.val, "->", node.next.val, "->", node.next.next.val, sep=" ")
if __name__ == "__main__":
nums1 = [2, 4, 3]
nums2 = [5, 6, 4]
print(
printTree(Solution().addTwoNumbers(buildTree(nums1),
buildTree(nums2))))
| from ListNode import *
class Solution():
def addTwoNumbers(self, l1, l2):
head_node = current_node = ListNode(None)
carry = 0
p = l1
q = l2
while p or q or carry:
x = y = 0
if p is not None:
x = p.val
p = p.next
if q is not None:
y = q.val
q = q.next
sum = x + y + carry
sum, carry = sum % 10, int(sum / 10)
current_node.next = ListNode(sum)
current_node = current_node.next
return head_node.next
def buildTree(nums):
head = node = ListNode(None)
for num in nums:
node.next = ListNode(num)
node = node.next
return head.next
def printTree(node):
while node:
print(node.val, end='')
node = node.next
if node: print(' -> ', end='')
print()
if __name__ == '__main__':
nums1 = [2, 4]
nums2 = [2, 5, 9]
printTree(Solution().addTwoNumbers(buildTree(nums1),
buildTree(nums2)))
| Refactor build and print method | Refactor build and print method
| Python | mit | xliiauo/leetcode,xiao0720/leetcode,xiao0720/leetcode,xliiauo/leetcode,xliiauo/leetcode | ---
+++
@@ -3,8 +3,7 @@
class Solution():
def addTwoNumbers(self, l1, l2):
- current_node = ListNode(None)
- head_node = current_node
+ head_node = current_node = ListNode(None)
carry = 0
p = l1
q = l2
@@ -30,19 +29,23 @@
def buildTree(nums):
- node = ListNode(nums[0])
- node.next = ListNode(nums[1])
- node.next.next = ListNode(nums[2])
- return node
+ head = node = ListNode(None)
+ for num in nums:
+ node.next = ListNode(num)
+ node = node.next
+ return head.next
def printTree(node):
- print(node.val, "->", node.next.val, "->", node.next.next.val, sep=" ")
+ while node:
+ print(node.val, end='')
+ node = node.next
+ if node: print(' -> ', end='')
+ print()
-if __name__ == "__main__":
- nums1 = [2, 4, 3]
- nums2 = [5, 6, 4]
- print(
- printTree(Solution().addTwoNumbers(buildTree(nums1),
- buildTree(nums2))))
+if __name__ == '__main__':
+ nums1 = [2, 4]
+ nums2 = [2, 5, 9]
+ printTree(Solution().addTwoNumbers(buildTree(nums1),
+ buildTree(nums2))) |
edb10e7ae1f428dade04a9976c3b3f985065d458 | settings/__init__.py | settings/__init__.py | # -*- coding: utf-8 -*-
from __future__ import print_function
# Standard Library
import sys
if "test" in sys.argv:
print("\033[1;91mNo django tests.\033[0m")
print("Try: \033[1;33mpy.test\033[0m")
sys.exit(0)
from .common import * # noqa
try:
from .dev import * # noqa
from .prod import * # noqa
except ImportError:
pass
| # -*- coding: utf-8 -*-
from __future__ import print_function
# Standard Library
import sys
if "test" in sys.argv:
print("\033[1;91mNo django tests.\033[0m")
print("Try: \033[1;33mpy.test\033[0m")
sys.exit(0)
from .common import * # noqa
try:
from .dev import * # noqa
except ImportError:
pass
try:
from .prod import * # noqa
except ImportError:
pass
| Make sure prod.py is read in settings | Make sure prod.py is read in settings
| Python | mit | hTrap/junction,farhaanbukhsh/junction,ChillarAnand/junction,farhaanbukhsh/junction,akshayaurora/junction,NabeelValapra/junction,pythonindia/junction,shashisp/junction,hTrap/junction,ChillarAnand/junction,shashisp/junction,nava45/junction,NabeelValapra/junction,shashisp/junction,farhaanbukhsh/junction,akshayaurora/junction,shashisp/junction,nava45/junction,pythonindia/junction,Rahul91/junction,NabeelValapra/junction,hTrap/junction,Rahul91/junction,Rahul91/junction,praba230890/junction,Rahul91/junction,akshayaurora/junction,nava45/junction,nava45/junction,hTrap/junction,pythonindia/junction,ChillarAnand/junction,pythonindia/junction,akshayaurora/junction,praba230890/junction,NabeelValapra/junction,praba230890/junction,ChillarAnand/junction,farhaanbukhsh/junction,praba230890/junction | ---
+++
@@ -13,6 +13,10 @@
try:
from .dev import * # noqa
+except ImportError:
+ pass
+
+try:
from .prod import * # noqa
except ImportError:
pass |
b7db1d067c8efe86a6ab39a15fef0ab878656249 | uber/__init__.py | uber/__init__.py | import os
from decimal import Decimal
from pockets.autolog import log
from uber._version import __version__ # noqa: F401
def on_load():
"""
Called by sideboard when the uber plugin is loaded.
"""
# Note: The following imports have side effects
from uber import config # noqa: F401
from uber import api # noqa: F401
from uber import automated_emails # noqa: F401
from uber import custom_tags # noqa: F401
from uber import jinja # noqa: F401
from uber import menu # noqa: F401
from uber import models # noqa: F401
from uber import model_checks # noqa: F401
from uber import sep_commands # noqa: F401
from uber import server # noqa: F401
from uber import tasks # noqa: F401
# sideboard must be imported AFTER the on_load() function is declared,
# otherwise on_load() won't exist yet when sideboard looks for it.
import sideboard # noqa: E402
# NOTE: this will decrease the precision of some serialized decimal.Decimals
sideboard.lib.serializer.register(Decimal, lambda n: float(n))
| import os
from decimal import Decimal
from pockets.autolog import log
from uber._version import __version__ # noqa: F401
def on_load():
"""
Called by sideboard when the uber plugin is loaded.
"""
# Note: The following imports have side effects
from uber import config # noqa: F401
from uber import api # noqa: F401
from uber import automated_emails # noqa: F401
from uber import custom_tags # noqa: F401
from uber import jinja # noqa: F401
from uber import menu # noqa: F401
from uber import models # noqa: F401
from uber import model_checks # noqa: F401
from uber import sep_commands # noqa: F401
from uber import server # noqa: F401
from uber import tasks # noqa: F401
# sideboard must be imported AFTER the on_load() function is declared,
# otherwise on_load() won't exist yet when sideboard looks for it.
import sideboard # noqa: E402
# NOTE: this will decrease the precision of some serialized decimal.Decimals
sideboard.lib.serializer.register(Decimal, lambda n: float(n))
@sideboard.lib.on_startup
def create_data_dirs():
from uber.config import c
for directory in c.DATA_DIRS.values():
if not os.path.exists(directory):
log.info('Creating directory {}'.format(directory))
os.makedirs(directory, mode=0o744)
| Revert "Don't make dirs on startup" | Revert "Don't make dirs on startup"
This reverts commit 17243b31fc6c8d8f4bb0dc7e11e2601800e80bb0.
| Python | agpl-3.0 | magfest/ubersystem,magfest/ubersystem,magfest/ubersystem,magfest/ubersystem | ---
+++
@@ -32,3 +32,12 @@
# NOTE: this will decrease the precision of some serialized decimal.Decimals
sideboard.lib.serializer.register(Decimal, lambda n: float(n))
+
+@sideboard.lib.on_startup
+def create_data_dirs():
+ from uber.config import c
+
+ for directory in c.DATA_DIRS.values():
+ if not os.path.exists(directory):
+ log.info('Creating directory {}'.format(directory))
+ os.makedirs(directory, mode=0o744) |
84bb5fbef5c98bdee344ac9d9739f035bd9a8f7b | tooz/drivers/zake.py | tooz/drivers/zake.py | # Copyright (c) 2013-2014 Mirantis Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from zake import fake_client
from zake import fake_storage
from tooz.drivers import zookeeper
class ZakeDriver(zookeeper.KazooDriver):
"""The driver using the Zake client which mimic a fake Kazoo client
without the need of real ZooKeeper servers.
"""
# here we need to pass *threading handler* as an argument
fake_storage = fake_storage.FakeStorage(
fake_client.k_threading.SequentialThreadingHandler())
@classmethod
def _make_client(cls, parsed_url, options):
return fake_client.FakeClient(storage=cls.fake_storage)
| # Copyright (c) 2013-2014 Mirantis Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from zake import fake_client
from zake import fake_storage
from tooz.drivers import zookeeper
class ZakeDriver(zookeeper.KazooDriver):
"""The driver using the Zake client which mimic a fake Kazoo client
without the need of real ZooKeeper servers.
"""
# NOTE(harlowja): this creates a shared backend 'storage' layer that
# would typically exist inside a zookeeper server, but since zake has
# no concept of a 'real' zookeeper server we create a fake one and share
# it among active clients to simulate zookeeper's consistent storage in
# a thread-safe manner.
fake_storage = fake_storage.FakeStorage(
fake_client.k_threading.SequentialThreadingHandler())
@classmethod
def _make_client(cls, parsed_url, options):
return fake_client.FakeClient(storage=cls.fake_storage)
| Change inline docs about class fake storage variable | Change inline docs about class fake storage variable
Adjust the docs to better describe why a fake storage
class attribute exists and how it is used and what it
represents compared to a real zookeeper setup.
Change-Id: I255ccd83c8033266e9cee09a343468ae4e0f2bfd
| Python | apache-2.0 | citrix-openstack-build/tooz,openstack/tooz,citrix-openstack-build/tooz,openstack/tooz | ---
+++
@@ -25,7 +25,11 @@
without the need of real ZooKeeper servers.
"""
- # here we need to pass *threading handler* as an argument
+ # NOTE(harlowja): this creates a shared backend 'storage' layer that
+ # would typically exist inside a zookeeper server, but since zake has
+ # no concept of a 'real' zookeeper server we create a fake one and share
+ # it among active clients to simulate zookeeper's consistent storage in
+ # a thread-safe manner.
fake_storage = fake_storage.FakeStorage(
fake_client.k_threading.SequentialThreadingHandler())
|
40493966b989e73a07f6a33bd9e9497ae9ad9f3f | user/admin.py | user/admin.py | from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'get_name',
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_display_links = ('get_name', 'email')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
# form view
fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password')}),
('Permissions', {
'classes': ('collapse',),
'fields': (
'is_active',
'is_staff',
'is_superuser',
'groups',
'user_permissions')}),
('Important dates', {
'classes': ('collapse',),
'fields': ('last_login',)}),
)
filter_horizontal = (
'groups', 'user_permissions',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
def get_name(self, user):
return user.profile.name
get_name.short_description = 'Name'
get_name.admin_order_field = 'profile__name'
| from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
# list view
list_display = (
'get_name',
'email',
'get_date_joined',
'is_staff',
'is_superuser')
list_display_links = ('get_name', 'email')
list_filter = (
'is_staff',
'is_superuser',
'profile__joined')
ordering = ('email',)
search_fields = ('email',)
# form view
fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email',)}),
('Permissions', {
'classes': ('collapse',),
'fields': (
'is_active',
'is_staff',
'is_superuser',
'groups',
'user_permissions')}),
('Important dates', {
'classes': ('collapse',),
'fields': ('last_login',)}),
)
filter_horizontal = (
'groups', 'user_permissions',)
def get_date_joined(self, user):
return user.profile.joined
get_date_joined.short_description = 'Joined'
get_date_joined.admin_order_field = (
'profile__joined')
def get_name(self, user):
return user.profile.name
get_name.short_description = 'Name'
get_name.admin_order_field = 'profile__name'
| Remove password from UserAdmin fieldsets. | Ch23: Remove password from UserAdmin fieldsets.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | ---
+++
@@ -23,7 +23,7 @@
fieldsets = (
(None, {
'classes': ('wide',),
- 'fields': ('email', 'password')}),
+ 'fields': ('email',)}),
('Permissions', {
'classes': ('collapse',),
'fields': ( |
178373851bcb66487b89224f19e3c3dc887a8f95 | user_profile/urls.py | user_profile/urls.py | from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^', views.ViewView.as_view(), name='profile_own_view'),
url(r'^edit/', views.EditView.as_view(), name='profile_edit'),
url(r'^view/', views.ViewView.as_view(), name='profile_own_view'),
url(r'^view/(?P<user_name>\d+)/$', views.ViewView.as_view(), name='profile_view'),
]
| from django.conf.urls import include, url
from django.contrib import admin
from . import views
urlpatterns = [
url(r'^$', views.ViewView.as_view(), name='profile_own_view'),
url(r'^edit/', views.EditView.as_view(), name='profile_edit'),
url(r'^view/', views.ViewView.as_view(), name='profile_own_view'),
url(r'^view/(?P<user_name>\d+)/$', views.ViewView.as_view(), name='profile_view'),
]
| Fix user_profile index page url-pattern | Fix user_profile index page url-pattern
| Python | mit | DeWaster/Tviserrys,DeWaster/Tviserrys | ---
+++
@@ -4,7 +4,7 @@
from . import views
urlpatterns = [
- url(r'^', views.ViewView.as_view(), name='profile_own_view'),
+ url(r'^$', views.ViewView.as_view(), name='profile_own_view'),
url(r'^edit/', views.EditView.as_view(), name='profile_edit'),
url(r'^view/', views.ViewView.as_view(), name='profile_own_view'),
url(r'^view/(?P<user_name>\d+)/$', views.ViewView.as_view(), name='profile_view'), |
01b15e2df498706a342009e300c77168032c7824 | fbmsgbot/bot.py | fbmsgbot/bot.py | from http_client import HttpClient
from models.message import ReceivedMessage
class Bot():
"""
@brief Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient(token)
def send_message(self, message, completion):
def _completion(response, error):
if error is not None:
print 'Error Encountered! Could not send message\n'
print 'Message: %s' % error
else:
completion(response)
self.client.submit_request(
'/me/messages',
'POST',
message.to_json(),
_completion)
def set_welcome(self, message, completion):
def _completion(response, error):
if error is not None:
pass
else:
completion(response)
self.client.submit_request(
'/me/thread_settings',
'POST',
message.to_json(),
_completion)
def messages_for_request(self, request):
"""
Handle incoming requests from the webhook
"""
entries = request.json['entry']
messages = []
for entry in entries:
message = {}
for msg in entry['messaging']:
messages.append(ReceivedMessage(msg))
return messages
def message_from_reciept(receipt):
"""
Handle receipts
"""
raise NotImplementedError
| from http_client import HttpClient
from models.message import ReceivedMessage
class Bot():
"""
@brief Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient(token)
def send_message(self, message):
response, error = self.client.submit_request(
'/me/messages',
'POST',
message.to_json(),
None)
if error is not None:
print 'Error Encountered! Could not send message\n'
print 'Message: %s' % error
return response, error
def set_welcome(self, message, completion):
def _completion(response, error):
if error is not None:
pass
else:
completion(response)
self.client.submit_request(
'/me/thread_settings',
'POST',
message.to_json(),
_completion)
def messages_for_request(self, request):
"""
Handle incoming requests from the webhook
"""
entries = request.json['entry']
messages = []
for entry in entries:
message = {}
for msg in entry['messaging']:
messages.append(ReceivedMessage(msg))
return messages
def message_from_reciept(receipt):
"""
Handle receipts
"""
raise NotImplementedError
| Refactor send_message to remove completion block | Refactor send_message to remove completion block
| Python | mit | ben-cunningham/python-messenger-bot,ben-cunningham/pybot | ---
+++
@@ -11,20 +11,19 @@
self.api_token = token
self.client = HttpClient(token)
- def send_message(self, message, completion):
+ def send_message(self, message):
- def _completion(response, error):
- if error is not None:
- print 'Error Encountered! Could not send message\n'
- print 'Message: %s' % error
- else:
- completion(response)
+ response, error = self.client.submit_request(
+ '/me/messages',
+ 'POST',
+ message.to_json(),
+ None)
- self.client.submit_request(
- '/me/messages',
- 'POST',
- message.to_json(),
- _completion)
+ if error is not None:
+ print 'Error Encountered! Could not send message\n'
+ print 'Message: %s' % error
+
+ return response, error
def set_welcome(self, message, completion):
|
c209115dfb385cc167457aa87808b21a554f63cf | yvs/set_pref.py | yvs/set_pref.py | # yvs.set_pref
# coding=utf-8
from __future__ import unicode_literals
import json
import sys
import yvs.shared as shared
# Parse pref set data from the given JSON string
def parse_pref_set_data(pref_set_data_str):
pref_set_data = json.loads(pref_set_data_str)
return pref_set_data['pref'], pref_set_data['value']
# Set the YouVersion Suggest preference with the given key
def set_pref(pref_id, value_id):
user_prefs = shared.get_user_prefs()
user_prefs[pref_id] = value_id
# If new language is set, ensure that preferred version is updated also
if pref_id == 'language':
bible = shared.get_bible_data(language=value_id)
user_prefs['version'] = bible['default_version']
shared.set_user_prefs(user_prefs)
def main(pref_set_data_str):
pref, value = parse_pref_set_data(pref_set_data_str)
set_pref(pref['id'], value['id'])
print('Set preferred {} to {}'.format(
pref['name'].lower(), value['name']).encode('utf-8'))
if __name__ == '__main__':
main(sys.argv[1].decode('utf-8'))
| # yvs.set_pref
# coding=utf-8
from __future__ import unicode_literals
import json
import sys
import yvs.shared as shared
# Parse pref set data from the given JSON string
def parse_pref_set_data(pref_set_data_str):
pref_set_data = json.loads(pref_set_data_str)
return pref_set_data['pref'], pref_set_data['value']
# Set the YouVersion Suggest preference with the given key
def set_pref(pref_id, value_id):
user_prefs = shared.get_user_prefs()
user_prefs[pref_id] = value_id
# If new language is set, ensure that preferred version is updated also
if pref_id == 'language':
bible = shared.get_bible_data(language=value_id)
user_prefs['version'] = bible['default_version']
shared.set_user_prefs(user_prefs)
def main(pref_set_data_str):
pref, value = parse_pref_set_data(pref_set_data_str)
set_pref(pref['id'], value['id'])
print('Preferred {} set to {}'.format(
pref['name'].lower(), value['name']).encode('utf-8'))
if __name__ == '__main__':
main(sys.argv[1].decode('utf-8'))
| Revise "set preference" notification to be clearer | Revise "set preference" notification to be clearer
Notification message originally read as though it were in the
imperative when it is meant to be in past tense.
| Python | mit | caleb531/youversion-suggest,caleb531/youversion-suggest | ---
+++
@@ -32,7 +32,7 @@
pref, value = parse_pref_set_data(pref_set_data_str)
set_pref(pref['id'], value['id'])
- print('Set preferred {} to {}'.format(
+ print('Preferred {} set to {}'.format(
pref['name'].lower(), value['name']).encode('utf-8'))
|
bf53b5a1e6562162ba9c3f89568ebfeb0124249d | athenet/layers/pool.py | athenet/layers/pool.py | """Pooling layer."""
from theano.tensor.signal import downsample
from athenet.layers import Layer
class MaxPool(Layer):
"""Max-pooling layer."""
def __init__(self, poolsize, stride=None):
"""Create max-pooling layer.
:poolsize: Pooling factor in the format (height, width).
:stride: Pair representing interval at which to apply the filters.
"""
super(MaxPool, self).__init__()
self.poolsize = poolsize
self.stride = stride
@property
def output_shape(self):
image_h, image_w, n_channels = self.input_shape
pool_h, pool_w = self.poolsize
if self.stride:
stride_h, stride_w = self.stride
else:
stride_h, stride_w = pool_h, pool_w
output_h = (image_h - pool_h) / stride_h + 1
output_w = (image_w - pool_w) / stride_w + 1
return (output_h, output_w, n_channels)
def _get_output(self, layer_input):
"""Return layer's output.
layer_input: Input in the format (batch size, number of channels,
image height, image width).
:return: Layer output.
"""
return downsample.max_pool_2d(
input=layer_input,
ds=self.poolsize,
ignore_border=True,
st=self.stride
)
| """Pooling layer."""
from theano.tensor.signal import downsample
from athenet.layers import Layer
class MaxPool(Layer):
"""Max-pooling layer."""
def __init__(self, poolsize, stride=None):
"""Create max-pooling layer.
:poolsize: Pooling factor in the format (height, width).
:stride: Pair representing interval at which to apply the filters.
"""
super(MaxPool, self).__init__()
self.poolsize = poolsize
if stride is None:
self.stride = poolsize
else:
self.stride = stride
@property
def output_shape(self):
image_h, image_w, n_channels = self.input_shape
pool_h, pool_w = self.poolsize
if self.stride is not None:
stride_h, stride_w = self.stride
else:
stride_h, stride_w = pool_h, pool_w
output_h = (image_h - pool_h) / stride_h + 1
output_w = (image_w - pool_w) / stride_w + 1
return (output_h, output_w, n_channels)
def _get_output(self, layer_input):
"""Return layer's output.
layer_input: Input in the format (batch size, number of channels,
image height, image width).
:return: Layer output.
"""
if self.stride == self.poolsize:
stride = None
else:
stride = self.stride
return downsample.max_pool_2d(
input=layer_input,
ds=self.poolsize,
ignore_border=True,
st=stride
)
| Change stride semantic in MaxPool | Change stride semantic in MaxPool
| Python | bsd-2-clause | heurezjusz/Athenet,heurezjusz/Athena | ---
+++
@@ -15,13 +15,16 @@
"""
super(MaxPool, self).__init__()
self.poolsize = poolsize
- self.stride = stride
+ if stride is None:
+ self.stride = poolsize
+ else:
+ self.stride = stride
@property
def output_shape(self):
image_h, image_w, n_channels = self.input_shape
pool_h, pool_w = self.poolsize
- if self.stride:
+ if self.stride is not None:
stride_h, stride_w = self.stride
else:
stride_h, stride_w = pool_h, pool_w
@@ -37,9 +40,13 @@
image height, image width).
:return: Layer output.
"""
+ if self.stride == self.poolsize:
+ stride = None
+ else:
+ stride = self.stride
return downsample.max_pool_2d(
input=layer_input,
ds=self.poolsize,
ignore_border=True,
- st=self.stride
+ st=stride
) |
f56ed1c14b87e4d28e8e853cf64d91cf756576d1 | dashboard/tasks.py | dashboard/tasks.py | import json
import requests
from bitcoinmonitor.celeryconfig import app
from channels import Group
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 6.0,
'args': ("dale",)
},
}
@app.task
def get_bitcoin_price(arg):
last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
Group('btc-price').send({'text': json.dumps({
'last_price': last_price
})})
| import json
from bitcoinmonitor.celeryconfig import app
from channels import Group
from .helpers import get_coin_price
app.conf.beat_schedule = {
'get-bitcoin-price-every-five-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
'schedule': 5.0,
},
'get-litecoin-price-every-five-seconds': {
'task': 'dashboard.tasks.get_litcoin_price',
'schedule': 5.0,
},
}
@app.task
def get_bitcoin_price():
data = get_coin_price('BTC')
Group('btc-price').send({'text': data})
@app.task
def get_litcoin_price():
data = get_coin_price('LTC')
Group('ltc-price').send({'text': data})
| Create another task to get the litecoin price | Create another task to get the litecoin price
| Python | mit | alessandroHenrique/coinpricemonitor,alessandroHenrique/coinpricemonitor,alessandroHenrique/coinpricemonitor | ---
+++
@@ -1,22 +1,31 @@
import json
-import requests
from bitcoinmonitor.celeryconfig import app
from channels import Group
+from .helpers import get_coin_price
app.conf.beat_schedule = {
- 'add-every-30-seconds': {
+ 'get-bitcoin-price-every-five-seconds': {
'task': 'dashboard.tasks.get_bitcoin_price',
- 'schedule': 6.0,
- 'args': ("dale",)
+ 'schedule': 5.0,
+ },
+ 'get-litecoin-price-every-five-seconds': {
+ 'task': 'dashboard.tasks.get_litcoin_price',
+ 'schedule': 5.0,
},
}
@app.task
-def get_bitcoin_price(arg):
- last_price = requests.get("https://bittrex.com/api/v1.1/public/getticker?market=USDT-BTC").json().get("result").get("Last")
- Group('btc-price').send({'text': json.dumps({
- 'last_price': last_price
- })})
+def get_bitcoin_price():
+ data = get_coin_price('BTC')
+
+ Group('btc-price').send({'text': data})
+
+
+@app.task
+def get_litcoin_price():
+ data = get_coin_price('LTC')
+
+ Group('ltc-price').send({'text': data}) |
8d53a7478a139770d9ffb241ec2985123c403845 | bookmarks/bookmarks/models.py | bookmarks/bookmarks/models.py | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "{}".format(
"Bookmark added:",
),
'icon_emoji': ":blue_book:",
'attachments': [
{
"fallback": instance.title,
"color": "good",
"title": instance.title,
"title_link": instance.url,
"text": instance.description,
}
]
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "<{}|{}>\n{}".format(
instance.url,
instance.title,
instance.description,
),
'icon_emoji': ":blue_book:",
'unfurl_links': True
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| Remove attachment and use slack link unfurling | Remove attachment and use slack link unfurling
| Python | mit | tom-henderson/bookmarks,tom-henderson/bookmarks,tom-henderson/bookmarks | ---
+++
@@ -36,19 +36,13 @@
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
- 'text': "{}".format(
- "Bookmark added:",
+ 'text': "<{}|{}>\n{}".format(
+ instance.url,
+ instance.title,
+ instance.description,
),
'icon_emoji': ":blue_book:",
- 'attachments': [
- {
- "fallback": instance.title,
- "color": "good",
- "title": instance.title,
- "title_link": instance.url,
- "text": instance.description,
- }
- ]
+ 'unfurl_links': True
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload) |
b9e2418e90ba61905aacf37d254e2718124aba77 | apps/graphing/tests/__init__.py | apps/graphing/tests/__init__.py | from basic import *
# this generates a lot of non-exception generating errors
# all the more incentive to fix these tests up sooner!
# TODO - fix
from views import *
| from basic import *
# this generates a lot of non-exception generating errors
# all the more incentive to fix these tests up sooner!
# TODO - fix
# actually, to prevent getting swamped by email error
# logs, i'll comment this out until it's fixed
# from views import *
| Disable graphing unit tests until we fix logger.error conditions | Disable graphing unit tests until we fix logger.error conditions
| Python | bsd-3-clause | SEL-Columbia/commcare-hq,gmimano/commcaretest,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,gmimano/commcaretest,gmimano/commcaretest | ---
+++
@@ -2,4 +2,6 @@
# this generates a lot of non-exception generating errors
# all the more incentive to fix these tests up sooner!
# TODO - fix
-from views import *
+# actually, to prevent getting swamped by email error
+# logs, i'll comment this out until it's fixed
+# from views import * |
abe9be5cc9789b7b1c091f08b23655f903d71fb2 | apps/impala/src/impala/tests.py | apps/impala/src/impala/tests.py | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nose.tools import assert_true, assert_equal, assert_false
from desktop.lib.django_test_util import make_logged_in_client
class TestImpala:
def setUp(self):
self.client = make_logged_in_client()
def test_basic_flow(self):
response = self.client.get("/impala/")
assert_true("""<li id="impalaIcon"
class="active""" in response.content, response.content)
assert_true('Query Editor' in response.content)
response = self.client.get("/impala/execute/")
assert_true('Query Editor' in response.content)
| #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from nose.tools import assert_true, assert_equal, assert_false
from desktop.lib.django_test_util import make_logged_in_client
class TestImpala:
def setUp(self):
self.client = make_logged_in_client()
def test_basic_flow(self):
response = self.client.get("/impala/")
assert_true(re.search('<li id="impalaIcon"\W+class="active', response.content), response.content)
assert_true('Query Editor' in response.content)
response = self.client.get("/impala/execute/")
assert_true('Query Editor' in response.content)
| Fix test looking for Impala icon | [impala] Fix test looking for Impala icon
Use regexp in tests for matching white spaces and new lines
| Python | apache-2.0 | xiangel/hue,xiangel/hue,GitHublong/hue,kawamon/hue,kawamon/hue,rahul67/hue,cloudera/hue,lumig242/Hue-Integration-with-CDAP,dulems/hue,xq262144/hue,sanjeevtripurari/hue,epssy/hue,rahul67/hue,pwong-mapr/private-hue,x303597316/hue,ahmed-mahran/hue,epssy/hue,ChenJunor/hue,sanjeevtripurari/hue,abhishek-ch/hue,epssy/hue,xiangel/hue,sanjeevtripurari/hue,fangxingli/hue,todaychi/hue,kawamon/hue,Peddle/hue,cloudera/hue,rahul67/hue,cloudera/hue,kawamon/hue,GitHublong/hue,vitan/hue,yoer/hue,jayceyxc/hue,erickt/hue,javachengwc/hue,kawamon/hue,ahmed-mahran/hue,MobinRanjbar/hue,abhishek-ch/hue,Peddle/hue,dulems/hue,kawamon/hue,x303597316/hue,hdinsight/hue,MobinRanjbar/hue,pratikmallya/hue,jayceyxc/hue,nvoron23/hue,jjmleiro/hue,pwong-mapr/private-hue,erickt/hue,erickt/hue,Peddle/hue,jounex/hue,rahul67/hue,javachengwc/hue,MobinRanjbar/hue,jjmleiro/hue,vitan/hue,pwong-mapr/private-hue,x303597316/hue,cloudera/hue,kawamon/hue,mapr/hue,jjmleiro/hue,azureplus/hue,erickt/hue,pwong-mapr/private-hue,xiangel/hue,abhishek-ch/hue,fangxingli/hue,kawamon/hue,ChenJunor/hue,Peddle/hue,pratikmallya/hue,jjmleiro/hue,jayceyxc/hue,MobinRanjbar/hue,vmax-feihu/hue,jounex/hue,todaychi/hue,javachengwc/hue,GitHublong/hue,lumig242/Hue-Integration-with-CDAP,vitan/hue,javachengwc/hue,mapr/hue,vitan/hue,mapr/hue,epssy/hue,yoer/hue,erickt/hue,Peddle/hue,cloudera/hue,ChenJunor/hue,fangxingli/hue,abhishek-ch/hue,hdinsight/hue,todaychi/hue,ahmed-mahran/hue,yongshengwang/hue,ChenJunor/hue,jounex/hue,vmax-feihu/hue,GitHublong/hue,mapr/hue,vitan/hue,azureplus/hue,xq262144/hue,nvoron23/hue,jayceyxc/hue,vmax-feihu/hue,mapr/hue,hdinsight/hue,abhishek-ch/hue,abhishek-ch/hue,cloudera/hue,jounex/hue,jjmleiro/hue,GitHublong/hue,yongshengwang/hue,xq262144/hue,todaychi/hue,Peddle/hue,fangxingli/hue,todaychi/hue,hdinsight/hue,pratikmallya/hue,Peddle/hue,todaychi/hue,cloudera/hue,fangxingli/hue,javachengwc/hue,ahmed-mahran/hue,pwong-mapr/private-hue,azureplus/hue,kawamon/hue,nvoron23/hue,todaychi/hue,yon
gshengwang/hue,mapr/hue,jayceyxc/hue,sanjeevtripurari/hue,xiangel/hue,MobinRanjbar/hue,pratikmallya/hue,yongshengwang/hue,Peddle/hue,ahmed-mahran/hue,hdinsight/hue,cloudera/hue,rahul67/hue,todaychi/hue,fangxingli/hue,nvoron23/hue,fangxingli/hue,lumig242/Hue-Integration-with-CDAP,yongshengwang/hue,cloudera/hue,xq262144/hue,jayceyxc/hue,epssy/hue,GitHublong/hue,GitHublong/hue,erickt/hue,x303597316/hue,azureplus/hue,vitan/hue,dulems/hue,cloudera/hue,mapr/hue,Peddle/hue,kawamon/hue,cloudera/hue,kawamon/hue,dulems/hue,abhishek-ch/hue,jjmleiro/hue,x303597316/hue,javachengwc/hue,jjmleiro/hue,kawamon/hue,xq262144/hue,jounex/hue,dulems/hue,xq262144/hue,yongshengwang/hue,x303597316/hue,jounex/hue,kawamon/hue,nvoron23/hue,yoer/hue,jjmleiro/hue,kawamon/hue,cloudera/hue,vitan/hue,ahmed-mahran/hue,MobinRanjbar/hue,cloudera/hue,xiangel/hue,fangxingli/hue,ChenJunor/hue,javachengwc/hue,rahul67/hue,azureplus/hue,lumig242/Hue-Integration-with-CDAP,x303597316/hue,vmax-feihu/hue,kawamon/hue,cloudera/hue,nvoron23/hue,dulems/hue,pwong-mapr/private-hue,jounex/hue,azureplus/hue,azureplus/hue,rahul67/hue,ChenJunor/hue,lumig242/Hue-Integration-with-CDAP,azureplus/hue,cloudera/hue,yongshengwang/hue,yoer/hue,xq262144/hue,dulems/hue,hdinsight/hue,vmax-feihu/hue,cloudera/hue,xq262144/hue,abhishek-ch/hue,ahmed-mahran/hue,pratikmallya/hue,jounex/hue,kawamon/hue,yoer/hue,GitHublong/hue,vmax-feihu/hue,epssy/hue,lumig242/Hue-Integration-with-CDAP,ahmed-mahran/hue,javachengwc/hue,ChenJunor/hue,erickt/hue,lumig242/Hue-Integration-with-CDAP,MobinRanjbar/hue,xiangel/hue,yoer/hue,lumig242/Hue-Integration-with-CDAP,nvoron23/hue,kawamon/hue,pwong-mapr/private-hue,nvoron23/hue,yongshengwang/hue,jayceyxc/hue,hdinsight/hue,vmax-feihu/hue,jayceyxc/hue,xq262144/hue,vitan/hue,yoer/hue,dulems/hue,hdinsight/hue,xiangel/hue,rahul67/hue,sanjeevtripurari/hue,sanjeevtripurari/hue,sanjeevtripurari/hue,cloudera/hue,pratikmallya/hue,ChenJunor/hue,kawamon/hue,vmax-feihu/hue,cloudera/hue,jayceyxc/hue,sanjeevtripurari/hue,lum
ig242/Hue-Integration-with-CDAP,epssy/hue,pratikmallya/hue,MobinRanjbar/hue,erickt/hue,x303597316/hue,yoer/hue,todaychi/hue,jjmleiro/hue,epssy/hue,pratikmallya/hue | ---
+++
@@ -15,6 +15,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import re
+
from nose.tools import assert_true, assert_equal, assert_false
from desktop.lib.django_test_util import make_logged_in_client
@@ -26,8 +28,7 @@
def test_basic_flow(self):
response = self.client.get("/impala/")
- assert_true("""<li id="impalaIcon"
- class="active""" in response.content, response.content)
+ assert_true(re.search('<li id="impalaIcon"\W+class="active', response.content), response.content)
assert_true('Query Editor' in response.content)
response = self.client.get("/impala/execute/") |
4cfa123da2ccf416e2cb7e4bd9bc0c189a06081b | tests/__init__.py | tests/__init__.py | # Copyright 2013-2014 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
log = logging.getLogger()
log.setLevel('DEBUG')
| # Copyright 2013-2014 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
log = logging.getLogger()
log.setLevel('DEBUG')
# if nose didn't already attach a log handler, add one here
if not log.handlers:
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s [%(module)s:%(lineno)s]: %(message)s'))
log.addHandler(handler)
| Add test logger if not added by nose | Add test logger if not added by nose
| Python | apache-2.0 | jregovic/python-driver,thobbs/python-driver,mobify/python-driver,stef1927/python-driver,stef1927/python-driver,coldeasy/python-driver,thelastpickle/python-driver,beobal/python-driver,bbirand/python-driver,HackerEarth/cassandra-python-driver,yi719/python-driver,HackerEarth/cassandra-python-driver,thelastpickle/python-driver,bbirand/python-driver,coldeasy/python-driver,tempbottle/python-driver,jregovic/python-driver,yi719/python-driver,mambocab/python-driver,kracekumar/python-driver,beobal/python-driver,mambocab/python-driver,sontek/python-driver,jfelectron/python-driver,markflorisson/python-driver,datastax/python-driver,kishkaru/python-driver,mike-tr-adamson/python-driver,mobify/python-driver,kracekumar/python-driver,vipjml/python-driver,mike-tr-adamson/python-driver,vipjml/python-driver,datastax/python-driver,markflorisson/python-driver,sontek/python-driver,kishkaru/python-driver,jfelectron/python-driver,tempbottle/python-driver,thobbs/python-driver | ---
+++
@@ -16,3 +16,8 @@
log = logging.getLogger()
log.setLevel('DEBUG')
+# if nose didn't already attach a log handler, add one here
+if not log.handlers:
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s [%(module)s:%(lineno)s]: %(message)s'))
+ log.addHandler(handler) |
5841590444d202e6fb1fe8d7d937807ff9805677 | astropy/table/tests/test_row.py | astropy/table/tests/test_row.py | import pytest
import numpy as np
from .. import Column, Row, Table
class TestRow():
def setup_method(self, method):
self.a = Column('a', [1, 2, 3])
self.b = Column('b', [4, 5, 6])
def test_subclass(self):
"""Row is subclass of ndarray and Row"""
table = Table([self.a, self.b])
c = Row(table, 2)
assert isinstance(c, Row)
def test_values(self):
"""Row accurately reflects table values and attributes"""
table = Table([self.a, self.b], meta={'x': 1})
row = table[1]
assert row['a'] == 2
assert row['b'] == 5
assert row[0] == 2
assert row[1] == 5
assert row.meta is table.meta
assert row.colnames == table.colnames
assert row.columns is table.columns
with pytest.raises(IndexError):
row[2]
assert str(row.dtype) == "[('a', '<i8'), ('b', '<i8')]"
def test_ref(self):
"""Row is a reference into original table data"""
table = Table([self.a, self.b])
row = table[1]
row['a'] = 10
assert table['a'][1] == 10
| import pytest
import numpy as np
from .. import Column, Row, Table
class TestRow():
def setup_method(self, method):
self.a = Column('a', [1, 2, 3])
self.b = Column('b', [4, 5, 6])
self.t = Table([self.a, self.b])
def test_subclass(self):
"""Row is subclass of ndarray and Row"""
c = Row(self.t, 2)
assert isinstance(c, Row)
def test_values(self):
"""Row accurately reflects table values and attributes"""
table = self.t
row = table[1]
assert row['a'] == 2
assert row['b'] == 5
assert row[0] == 2
assert row[1] == 5
assert row.meta is table.meta
assert row.colnames == table.colnames
assert row.columns is table.columns
with pytest.raises(IndexError):
row[2]
assert str(row.dtype) == "[('a', '<i8'), ('b', '<i8')]"
def test_ref(self):
"""Row is a reference into original table data"""
table = self.t
row = table[1]
row['a'] = 10
assert table['a'][1] == 10
def SKIP_test_set_slice(self):
"""Set row elements with a slice
This currently fails because the underlying np.void object
row.data = table._data[index] does not support slice assignment.
"""
table = self.t
row = table[0]
row[:] = [-1, -1]
row[:1] = np.array([-2])
assert np.all(table._data == np.array([[-1, -1],
[-2, 5],
[3, 6]]))
| Add a (skipped) test for row slice assignment. | Add a (skipped) test for row slice assignment.
E. Bray requested the ability to assign to a table via a row with
slice assignment, e.g.
row = table[2]
row[2:5] = [2, 3, 4]
row[:] = 3
This does not currently work because np.void (which is what numpy
returns for structured array row access) does not support slice
assignment. Test is left there as a placeholder for now.
| Python | bsd-3-clause | bsipocz/astropy,lpsinger/astropy,MSeifert04/astropy,larrybradley/astropy,bsipocz/astropy,astropy/astropy,kelle/astropy,DougBurke/astropy,stargaser/astropy,dhomeier/astropy,lpsinger/astropy,pllim/astropy,dhomeier/astropy,DougBurke/astropy,lpsinger/astropy,astropy/astropy,tbabej/astropy,joergdietrich/astropy,funbaker/astropy,tbabej/astropy,tbabej/astropy,bsipocz/astropy,saimn/astropy,dhomeier/astropy,pllim/astropy,lpsinger/astropy,tbabej/astropy,larrybradley/astropy,StuartLittlefair/astropy,bsipocz/astropy,pllim/astropy,pllim/astropy,StuartLittlefair/astropy,saimn/astropy,funbaker/astropy,astropy/astropy,astropy/astropy,MSeifert04/astropy,mhvk/astropy,astropy/astropy,stargaser/astropy,AustereCuriosity/astropy,joergdietrich/astropy,DougBurke/astropy,funbaker/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,joergdietrich/astropy,AustereCuriosity/astropy,MSeifert04/astropy,larrybradley/astropy,StuartLittlefair/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,stargaser/astropy,saimn/astropy,lpsinger/astropy,AustereCuriosity/astropy,mhvk/astropy,kelle/astropy,aleksandr-bakanov/astropy,kelle/astropy,mhvk/astropy,saimn/astropy,larrybradley/astropy,funbaker/astropy,saimn/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,mhvk/astropy,StuartLittlefair/astropy,dhomeier/astropy,kelle/astropy,dhomeier/astropy,larrybradley/astropy,AustereCuriosity/astropy,DougBurke/astropy,MSeifert04/astropy,kelle/astropy,pllim/astropy,mhvk/astropy,stargaser/astropy,tbabej/astropy | ---
+++
@@ -8,16 +8,16 @@
def setup_method(self, method):
self.a = Column('a', [1, 2, 3])
self.b = Column('b', [4, 5, 6])
+ self.t = Table([self.a, self.b])
def test_subclass(self):
"""Row is subclass of ndarray and Row"""
- table = Table([self.a, self.b])
- c = Row(table, 2)
+ c = Row(self.t, 2)
assert isinstance(c, Row)
def test_values(self):
"""Row accurately reflects table values and attributes"""
- table = Table([self.a, self.b], meta={'x': 1})
+ table = self.t
row = table[1]
assert row['a'] == 2
assert row['b'] == 5
@@ -32,7 +32,21 @@
def test_ref(self):
"""Row is a reference into original table data"""
- table = Table([self.a, self.b])
+ table = self.t
row = table[1]
row['a'] = 10
assert table['a'][1] == 10
+
+ def SKIP_test_set_slice(self):
+ """Set row elements with a slice
+
+ This currently fails because the underlying np.void object
+ row.data = table._data[index] does not support slice assignment.
+ """
+ table = self.t
+ row = table[0]
+ row[:] = [-1, -1]
+ row[:1] = np.array([-2])
+ assert np.all(table._data == np.array([[-1, -1],
+ [-2, 5],
+ [3, 6]])) |
7faeebea3186443055cd8dd5e02137339c048ac9 | src/ggrc_basic_permissions/roles/ProgramOwner.py | src/ggrc_basic_permissions/roles/ProgramOwner.py | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
scope = "Private Program"
description = """
User with authorization to peform administrative tasks such as associating
users to roles within the scope of of a program.<br/><br/>When a person
creates a program they are automatically given the ProgramOwner role. This
allows them to Edit, Delete, or Map objects to the Program. It also allows
them to add people and assign them roles when their programs are private.
ProgramOwner is the most powerful role.
"""
permissions = {
"read": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole",
"Context",
],
"create": [
"ObjectDocument",
"ObjectPerson",
"ProgramControl",
"Relationship",
"UserRole",
"Audit",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole"
],
"delete": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole",
]
}
| # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
scope = "Private Program"
description = """
User with authorization to peform administrative tasks such as associating
users to roles within the scope of of a program.<br/><br/>When a person
creates a program they are automatically given the ProgramOwner role. This
allows them to Edit, Delete, or Map objects to the Program. It also allows
them to add people and assign them roles when their programs are private.
ProgramOwner is the most powerful role.
"""
permissions = {
"read": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole",
"Context",
],
"create": [
"ObjectDocument",
"ObjectPerson",
"ProgramControl",
"Relationship",
"UserRole",
"Audit",
"Snapshot",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole"
],
"delete": [
"ObjectDocument",
"ObjectPerson",
"Program",
"ProgramControl",
"Relationship",
"UserRole",
]
}
| Add support for creating snapshots for program owner | Add support for creating snapshots for program owner
| Python | apache-2.0 | AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core | ---
+++
@@ -27,6 +27,7 @@
"Relationship",
"UserRole",
"Audit",
+ "Snapshot",
],
"view_object_page": [
"__GGRC_ALL__" |
ce95fbb56d7b331c0d1f55f6f6f8fc32b1e0f135 | datastories/admin.py | datastories/admin.py | from models import Story, Page, StoryPage
from django.contrib import admin
admin.site.register(Story)
admin.site.register(Page)
admin.site.register(StoryPage)
| from models import Story, Page, StoryPage
from django.contrib import admin
class StoryAdmin(admin.ModelAdmin):
list_display = ('title', 'slug')
prepopulated_fields = dict(slug=['title'])
exclude = ('owner',)
# Uncomment the stuff below to automate keeping creator as owner
# and restricting editing to owner and superuser
# save_model(self, request, obj, form, change):
# if not change:
# obj.owner = request.user
# super(StoryAdmin, self).save_model(request, obj, form, change)
# def queryset(self, request):
# qs = super(StoryAdmin, self).queryset(request)
# if request.user.is_superuser:
# return qs
# return qs.filter(owner=request.user)
# def has_change_permission(self, request, obj=None):
# ok = super(StoryAdmin, self).has_change_permission(request, obj)
# if not ok:
# return False
# if obj is None:
# return True
# # Not sure I need this:
# if request.user.is_superuser:
# return True
# return request.user.id == obj.owner.id
admin.site.register(Story, StoryAdmin)
admin.site.register(Page)
admin.site.register(StoryPage)
| Add a StoryAdmin to hide owner. | Add a StoryAdmin to hide owner.
Also has comment out code for automating owner and restrincting
editing of a story to its owner and superuser. We can decide
whether we want it later (untested).
| Python | bsd-3-clause | MAPC/masshealth,MAPC/masshealth | ---
+++
@@ -1,6 +1,36 @@
from models import Story, Page, StoryPage
from django.contrib import admin
-admin.site.register(Story)
+class StoryAdmin(admin.ModelAdmin):
+ list_display = ('title', 'slug')
+ prepopulated_fields = dict(slug=['title'])
+ exclude = ('owner',)
+
+# Uncomment the stuff below to automate keeping creator as owner
+# and restricting editing to owner and superuser
+
+# save_model(self, request, obj, form, change):
+# if not change:
+# obj.owner = request.user
+# super(StoryAdmin, self).save_model(request, obj, form, change)
+
+# def queryset(self, request):
+# qs = super(StoryAdmin, self).queryset(request)
+# if request.user.is_superuser:
+# return qs
+# return qs.filter(owner=request.user)
+
+# def has_change_permission(self, request, obj=None):
+# ok = super(StoryAdmin, self).has_change_permission(request, obj)
+# if not ok:
+# return False
+# if obj is None:
+# return True
+# # Not sure I need this:
+# if request.user.is_superuser:
+# return True
+# return request.user.id == obj.owner.id
+
+admin.site.register(Story, StoryAdmin)
admin.site.register(Page)
admin.site.register(StoryPage) |
c0f37084b587e142aaadfa2c803d40bb9c4e55fe | website/project/metadata/authorizers/__init__.py | website/project/metadata/authorizers/__init__.py | import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = json.load(
open(
os.path.join(HERE, 'defaults.json')
)
)
fp = None
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
if fp:
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
fp.close()
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
| import json
import os
import logging
logger = logging.getLogger(__name__)
HERE = os.path.dirname(os.path.realpath(__file__))
groups = None
with open(os.path.join(HERE, 'defaults.json')) as defaults:
groups = json.load(defaults)
fp = None
try:
fp = open('{0}/local.json'.format(HERE))
except IOError:
logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.')
if fp:
for group, members in json.load(fp).iteritems():
if group not in groups:
groups[group] = members
else:
groups[group] = set(groups[group]) | set(members)
fp.close()
def members_for(group):
global_members = set(groups['global'])
return global_members | set(groups.get(group, []))
| Use context manager for filepointer management | Use context manager for filepointer management
| Python | apache-2.0 | kch8qx/osf.io,wearpants/osf.io,pattisdr/osf.io,cwisecarver/osf.io,billyhunt/osf.io,crcresearch/osf.io,baylee-d/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,sloria/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,samanehsan/osf.io,asanfilippo7/osf.io,felliott/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,acshi/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,billyhunt/osf.io,abought/osf.io,aaxelb/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,SSJohns/osf.io,doublebits/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,emetsger/osf.io,erinspace/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,icereval/osf.io,mluke93/osf.io,mluo613/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,chrisseto/osf.io,zamattiac/osf.io,caneruguz/osf.io,mluke93/osf.io,kch8qx/osf.io,KAsante95/osf.io,Ghalko/osf.io,binoculars/osf.io,emetsger/osf.io,abought/osf.io,baylee-d/osf.io,RomanZWang/osf.io,ticklemepierce/osf.io,mluo613/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,felliott/osf.io,samchrisinger/osf.io,binoculars/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,wearpants/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,samchrisinger/osf.io,rdhyee/osf.io,SSJohns/osf.io,samanehsan/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,chrisseto/osf.io,amyshi188/osf.io,doublebits/osf.io,hmoco/osf.io,leb2dg/osf.io,pattisdr/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,caneruguz/osf.io,mattclark/osf.io,mattclark/osf.io,alexschiller/osf.io,laurenrevere/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,Nesiehr/osf.io,kwierman/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,pattisdr/osf.io,jnayak1/osf.io,kch8qx/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,ticklemepierce/osf.io,acshi/osf.io,GageGaskins/osf.io,felliott/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,emetsger/osf.io,chennan47/osf.io,canerugu
z/osf.io,samchrisinger/osf.io,Ghalko/osf.io,adlius/osf.io,zachjanicki/osf.io,KAsante95/osf.io,abought/osf.io,samanehsan/osf.io,hmoco/osf.io,mfraezz/osf.io,alexschiller/osf.io,danielneis/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,KAsante95/osf.io,mfraezz/osf.io,hmoco/osf.io,leb2dg/osf.io,cslzchen/osf.io,aaxelb/osf.io,ticklemepierce/osf.io,samanehsan/osf.io,monikagrabowska/osf.io,danielneis/osf.io,DanielSBrown/osf.io,mluo613/osf.io,alexschiller/osf.io,KAsante95/osf.io,asanfilippo7/osf.io,saradbowman/osf.io,aaxelb/osf.io,jnayak1/osf.io,caseyrollins/osf.io,doublebits/osf.io,wearpants/osf.io,kwierman/osf.io,icereval/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,KAsante95/osf.io,billyhunt/osf.io,sloria/osf.io,danielneis/osf.io,brianjgeiger/osf.io,binoculars/osf.io,Johnetordoff/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,kwierman/osf.io,doublebits/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,caseyrollins/osf.io,RomanZWang/osf.io,zamattiac/osf.io,rdhyee/osf.io,RomanZWang/osf.io,erinspace/osf.io,amyshi188/osf.io,Ghalko/osf.io,cslzchen/osf.io,felliott/osf.io,abought/osf.io,mluo613/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,mluke93/osf.io,cslzchen/osf.io,baylee-d/osf.io,TomHeatwole/osf.io,leb2dg/osf.io,emetsger/osf.io,GageGaskins/osf.io,TomBaxter/osf.io,mluo613/osf.io,laurenrevere/osf.io,adlius/osf.io,GageGaskins/osf.io,saradbowman/osf.io,acshi/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,caneruguz/osf.io,rdhyee/osf.io,SSJohns/osf.io,acshi/osf.io,alexschiller/osf.io,mfraezz/osf.io,aaxelb/osf.io,GageGaskins/osf.io,Nesiehr/osf.io,wearpants/osf.io,brandonPurvis/osf.io,adlius/osf.io,GageGaskins/osf.io,hmoco/osf.io,Ghalko/osf.io,chrisseto/osf.io,doublebits/osf.io,chennan47/osf.io,RomanZWang/osf.io,acshi/osf.io,mluke93/osf.io,zachjanicki/osf.io,adlius/osf.io,danielneis/osf.io,chrisseto/osf.io,billyhunt/osf.io,chennan47/osf.io,zamattiac/osf.io,sloria/osf.io,kch8qx/osf.io,rdhyee/osf.io,kwierman/osf.io,brandonPurvis/osf.io,SSJoh
ns/osf.io | ---
+++
@@ -6,11 +6,10 @@
HERE = os.path.dirname(os.path.realpath(__file__))
-groups = json.load(
- open(
- os.path.join(HERE, 'defaults.json')
- )
-)
+groups = None
+with open(os.path.join(HERE, 'defaults.json')) as defaults:
+ groups = json.load(defaults)
+
fp = None
try:
fp = open('{0}/local.json'.format(HERE)) |
dbd92c4fd50f81ee23387636fddff827da8fb7f3 | dduplicated/cli.py | dduplicated/cli.py | # The client of DDuplicated tool.
from os import path as opath, getcwd
from pprint import pprint
from sys import argv
from dduplicated import commands
def get_paths(params):
paths = []
for param in params:
path = opath.join(getcwd(), param)
if opath.exists(path) and opath.isdir(path) and not opath.islink(path):
paths.append(path)
return paths
def main():
params = argv
processed_files = []
# Remove the command name
del params[0]
if len(params) == 0 or "help" in params:
commands.help()
elif "detect" in params:
processed_files = commands.detect(get_paths(params))
elif "delete" in params:
processed_files = commands.delete(commands.detect(get_paths(params)))
elif "link" in params:
processed_files = commands.link(commands.detect(get_paths(params)))
else:
commands.help()
exit()
if len(processed_files) > 0:
pprint(processed_files)
else:
print("No duplicates found")
print("Great! Bye!")
exit(0)
| # The client of DDuplicated tool.
from os import path as opath, getcwd
from pprint import pprint
from sys import argv
from dduplicated import commands
def get_paths(params):
paths = []
for param in params:
path = opath.join(getcwd(), param)
if opath.exists(path) and opath.isdir(path) and not opath.islink(path):
paths.append(path)
return paths
def main():
params = argv
processed_files = []
# Remove the command name
del params[0]
if len(params) == 0 or "help" in params:
commands.help()
exit()
elif "detect" in params:
processed_files = commands.detect(get_paths(params))
elif "delete" in params:
processed_files = commands.delete(commands.detect(get_paths(params)))
elif "link" in params:
processed_files = commands.link(commands.detect(get_paths(params)))
else:
commands.help()
exit()
if len(processed_files) > 0:
pprint(processed_files)
else:
print("No duplicates found")
print("Great! Bye!")
exit(0)
| Fix in output to help command. | Fix in output to help command.
Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com>
| Python | mit | messiasthi/dduplicated-cli | ---
+++
@@ -23,6 +23,8 @@
if len(params) == 0 or "help" in params:
commands.help()
+ exit()
+
elif "detect" in params:
processed_files = commands.detect(get_paths(params))
|
b4d9fb47e040b199f88cffb4a0b761c443f390b4 | dduplicated/cli.py | dduplicated/cli.py | # The client of DDuplicated tool.
from os import path as opath, getcwd
from pprint import pprint
from sys import argv
from dduplicated import commands
def get_paths(params):
paths = []
for param in params:
path = opath.join(getcwd(), param)
if opath.exists(path) and opath.isdir(path) and not opath.islink(path):
paths.append(path)
return paths
def main():
params = argv
processed_files = []
# Remove the command name
del params[0]
if len(params) == 0 or "help" in params:
commands.help()
elif "detect" in params:
processed_files = commands.detect(get_paths(params))
elif "delete" in params:
processed_files = commands.delete(commands.detect(get_paths(params)))
elif "link" in params:
processed_files = commands.link(commands.detect(get_paths(params)))
else:
commands.help()
if len(processed_files) > 0:
pprint(processed_files)
else:
print("No duplicates found")
print("Great! Bye!")
exit(0)
| # The client of DDuplicated tool.
from os import path as opath, getcwd
from pprint import pprint
from sys import argv
from dduplicated import commands
def get_paths(params):
paths = []
for param in params:
path = opath.join(getcwd(), param)
if opath.exists(path) and opath.isdir(path) and not opath.islink(path):
paths.append(path)
return paths
def main():
params = argv
processed_files = []
# Remove the command name
del params[0]
if len(params) == 0 or "help" in params:
commands.help()
elif "detect" in params:
processed_files = commands.detect(get_paths(params))
elif "delete" in params:
processed_files = commands.delete(commands.detect(get_paths(params)))
elif "link" in params:
processed_files = commands.link(commands.detect(get_paths(params)))
else:
commands.help()
exit()
if len(processed_files) > 0:
pprint(processed_files)
else:
print("No duplicates found")
print("Great! Bye!")
exit(0)
| Update in output to terminal. | Update in output to terminal.
Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com>
| Python | mit | messiasthi/dduplicated-cli | ---
+++
@@ -2,6 +2,7 @@
from os import path as opath, getcwd
from pprint import pprint
from sys import argv
+
from dduplicated import commands
def get_paths(params):
@@ -33,6 +34,7 @@
else:
commands.help()
+ exit()
if len(processed_files) > 0:
pprint(processed_files) |
44e50483a4ba9a4c47ee092d8d807930340c4e8e | testClient.py | testClient.py | #!/usr/bin/env python
"""
Binary memcached test client.
Copyright (c) 2007 Dustin Sallings <dustin@spy.net>
"""
import sys
import socket
import random
import struct
from testServer import REQ_MAGIC_BYTE, PKT_FMT, MIN_RECV_PACKET
if __name__ == '__main__':
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
rv = s.connect_ex(('127.0.0.1', 11211))
cmd, key, val = sys.argv[1:]
myopaque=random.Random().randint(0, 2**32)
s.send(struct.pack(PKT_FMT, REQ_MAGIC_BYTE,
int(cmd), len(key), myopaque, len(val)) + key + val)
response=s.recv(MIN_RECV_PACKET)
assert len(response) == MIN_RECV_PACKET
magic, cmd, errcode, opaque, remaining=struct.unpack(PKT_FMT, response)
assert magic == REQ_MAGIC_BYTE
assert opaque == myopaque
print "Error code: ", errcode
print s.recv(remaining)
| #!/usr/bin/env python
"""
Binary memcached test client.
Copyright (c) 2007 Dustin Sallings <dustin@spy.net>
"""
import sys
import socket
import random
import struct
from testServer import REQ_MAGIC_BYTE, PKT_FMT, MIN_RECV_PACKET, EXTRA_HDR_FMTS
from testServer import CMD_SET, CMD_ADD, CMD_REPLACE
if __name__ == '__main__':
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
rv = s.connect_ex(('127.0.0.1', 11211))
cmd, key, val = sys.argv[1:4]
cmd = int(cmd)
if cmd in EXTRA_HDR_FMTS:
flags, exp=[int(x) for x in sys.argv[4:]]
myopaque=random.Random().randint(0, 2**32)
extraHeader=''
if cmd in EXTRA_HDR_FMTS:
extraHeader = struct.pack(">II", flags, exp)
msg=struct.pack(PKT_FMT, REQ_MAGIC_BYTE,
cmd, len(key), myopaque, len(key) + len(extraHeader) + len(val))
s.send(msg + extraHeader + key + val)
response=s.recv(MIN_RECV_PACKET)
assert len(response) == MIN_RECV_PACKET
magic, cmd, errcode, opaque, remaining=struct.unpack(PKT_FMT, response)
assert magic == REQ_MAGIC_BYTE
assert opaque == myopaque
print "Error code: ", errcode
print `s.recv(remaining)`
| Allow mutation commands from the test client. | Allow mutation commands from the test client.
| Python | mit | dustin/memcached-test | ---
+++
@@ -10,21 +10,32 @@
import random
import struct
-from testServer import REQ_MAGIC_BYTE, PKT_FMT, MIN_RECV_PACKET
+from testServer import REQ_MAGIC_BYTE, PKT_FMT, MIN_RECV_PACKET, EXTRA_HDR_FMTS
+from testServer import CMD_SET, CMD_ADD, CMD_REPLACE
if __name__ == '__main__':
+
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
rv = s.connect_ex(('127.0.0.1', 11211))
- cmd, key, val = sys.argv[1:]
+ cmd, key, val = sys.argv[1:4]
+ cmd = int(cmd)
+ if cmd in EXTRA_HDR_FMTS:
+ flags, exp=[int(x) for x in sys.argv[4:]]
myopaque=random.Random().randint(0, 2**32)
- s.send(struct.pack(PKT_FMT, REQ_MAGIC_BYTE,
- int(cmd), len(key), myopaque, len(val)) + key + val)
+
+ extraHeader=''
+ if cmd in EXTRA_HDR_FMTS:
+ extraHeader = struct.pack(">II", flags, exp)
+
+ msg=struct.pack(PKT_FMT, REQ_MAGIC_BYTE,
+ cmd, len(key), myopaque, len(key) + len(extraHeader) + len(val))
+ s.send(msg + extraHeader + key + val)
response=s.recv(MIN_RECV_PACKET)
assert len(response) == MIN_RECV_PACKET
magic, cmd, errcode, opaque, remaining=struct.unpack(PKT_FMT, response)
assert magic == REQ_MAGIC_BYTE
assert opaque == myopaque
print "Error code: ", errcode
- print s.recv(remaining)
+ print `s.recv(remaining)` |
a0db97549a64595cb30554ccb583f928f4ad430d | api/models.py | api/models.py | import json
from django.db import models
import requests
from Suchary.settings import GCM_API_KEY
class Device(models.Model):
registration_id = models.TextField()
android_id = models.TextField(unique=True)
alias = models.TextField(blank=True)
version = models.CharField(max_length=20)
model = models.CharField(max_length=60)
os_version = models.CharField(max_length=20)
type = models.CharField(max_length=20)
active = models.BooleanField(default=True)
last_used = models.DateTimeField(auto_now=True)
last_seen = models.DateTimeField()
created = models.DateTimeField(auto_now_add=True)
def send_data(self, data):
url = 'https://android.googleapis.com/gcm/send'
header = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
payload = {'registration_ids': [self.registration_id], 'data': data}
r = requests.post(url, data=json.dumps(payload), headers=header)
return r
def __str__(self):
if self.alias:
return str(self.alias)
return str(self.registration_id) | import json
from django.db import models
import requests
from Suchary.settings import GCM_API_KEY
class Device(models.Model):
registration_id = models.TextField()
android_id = models.TextField(unique=True)
alias = models.TextField(blank=True)
version = models.CharField(max_length=20)
model = models.CharField(max_length=60)
os_version = models.CharField(max_length=20)
type = models.CharField(max_length=20)
active = models.BooleanField(default=True)
last_used = models.DateTimeField(auto_now=True)
last_seen = models.DateTimeField(auto_now_add=True)
created = models.DateTimeField(auto_now_add=True)
def send_data(self, data):
url = 'https://android.googleapis.com/gcm/send'
header = {'Authorization': 'key=' + GCM_API_KEY, 'Content-Type': 'application/json'}
payload = {'registration_ids': [self.registration_id], 'data': data}
r = requests.post(url, data=json.dumps(payload), headers=header)
return r
def __str__(self):
if self.alias:
return str(self.alias)
return str(self.registration_id)
| Fix Device model, not needed to set last_seen on creation | Fix Device model, not needed to set last_seen on creation
| Python | mit | jchmura/suchary-django,jchmura/suchary-django,jchmura/suchary-django | ---
+++
@@ -16,7 +16,7 @@
type = models.CharField(max_length=20)
active = models.BooleanField(default=True)
last_used = models.DateTimeField(auto_now=True)
- last_seen = models.DateTimeField()
+ last_seen = models.DateTimeField(auto_now_add=True)
created = models.DateTimeField(auto_now_add=True)
def send_data(self, data): |
83c5cc34539f68360cbab585af9465e95f3ec592 | tensorbayes/__init__.py | tensorbayes/__init__.py | from . import layers
from . import utils
from . import nputils
from . import tbutils
from . import distributions
from .utils import FileWriter
from .tbutils import function
| import sys
from . import layers
from . import utils
from . import nputils
from . import tbutils
from . import distributions
from .utils import FileWriter
from .tbutils import function
if 'ipykernel' in sys.argv[0]:
from . import nbutils
| Add nbutils import to base import | Add nbutils import to base import
| Python | mit | RuiShu/tensorbayes | ---
+++
@@ -1,3 +1,4 @@
+import sys
from . import layers
from . import utils
from . import nputils
@@ -5,3 +6,6 @@
from . import distributions
from .utils import FileWriter
from .tbutils import function
+
+if 'ipykernel' in sys.argv[0]:
+ from . import nbutils |
e5812200c68a720345310e9a14ffa2a1a8f849e0 | arg-reader.py | arg-reader.py | #!/usr/bin/env python3
# References:
# http://docs.python.org/3.3/library/argparse.html?highlight=argparse#argparse
# http://bip.weizmann.ac.il/course/python/PyMOTW/PyMOTW/docs/argparse/index.html
import argparse
def main():
'''
For help, use argument -h
$ ./arg-reader.py -h
To specify an argument, prefix with -
$ ./arg-reader.py -animalbig hippo -animalsmall fly
To read arguments from a file, prefix file name with @
$ ./arg-reader.py @args2.txt
To specify arguments from command line and from a file
$ ./arg-reader.py @args.txt -animalbig hippo
'''
parser = argparse.ArgumentParser(description='To read arguments from a file, prefix file name with @ e.g. $ ./arg-reader.py @args.txt -animalbig hippo',
fromfile_prefix_chars='@',
)
parser.add_argument('-animalbig', action="store", dest="animalbig",
help = 'name of a big animal')
parser.add_argument('-animalsmall', action="store", dest="animalsmall",
help = 'name of a small animal')
arguments = parser.parse_args()
print(arguments)
print(arguments.animalbig)
print(arguments.animalsmall)
if __name__ == "__main__": main()
| #!/usr/bin/env python3
# References:
# http://docs.python.org/3.3/library/argparse.html?highlight=argparse#argparse
# http://bip.weizmann.ac.il/course/python/PyMOTW/PyMOTW/docs/argparse/index.html
# http://stackoverflow.com/questions/3853722/python-argparse-how-to-insert-newline-the-help-text
import argparse
from argparse import RawTextHelpFormatter
def main():
'''
Read arguments from command line or from a file.
'''
parser = argparse.ArgumentParser(description=''' For help, use argument -h
$ ./arg-reader.py -h
To specify an argument, prefix with -
$ ./arg-reader.py -animalbig hippo -animalsmall fly
To read arguments from a file, prefix file name with @
$ ./arg-reader.py @args2.txt
To specify arguments from command line and from a file
$ ./arg-reader.py @args.txt -animalbig hippo''',
fromfile_prefix_chars='@',
formatter_class=RawTextHelpFormatter,
)
parser.add_argument('-animalbig', action="store", dest="animalbig",
help = 'name of a big animal')
parser.add_argument('-animalsmall', action="store", dest="animalsmall",
help = 'name of a small animal')
arguments = parser.parse_args()
print(arguments)
print(arguments.animalbig)
print(arguments.animalsmall)
if __name__ == "__main__": main()
| Format description to multiple lines using RawTextHelpFormatter. | Format description to multiple lines using RawTextHelpFormatter.
Reference:
# http://stackoverflow.com/questions/3853722/python-argparse-how-to-insert-newline-the-help-text
| Python | mit | beepscore/argparse | ---
+++
@@ -3,23 +3,26 @@
# References:
# http://docs.python.org/3.3/library/argparse.html?highlight=argparse#argparse
# http://bip.weizmann.ac.il/course/python/PyMOTW/PyMOTW/docs/argparse/index.html
+# http://stackoverflow.com/questions/3853722/python-argparse-how-to-insert-newline-the-help-text
import argparse
+from argparse import RawTextHelpFormatter
def main():
'''
- For help, use argument -h
+ Read arguments from command line or from a file.
+ '''
+
+ parser = argparse.ArgumentParser(description=''' For help, use argument -h
$ ./arg-reader.py -h
To specify an argument, prefix with -
$ ./arg-reader.py -animalbig hippo -animalsmall fly
To read arguments from a file, prefix file name with @
$ ./arg-reader.py @args2.txt
To specify arguments from command line and from a file
- $ ./arg-reader.py @args.txt -animalbig hippo
- '''
-
- parser = argparse.ArgumentParser(description='To read arguments from a file, prefix file name with @ e.g. $ ./arg-reader.py @args.txt -animalbig hippo',
+ $ ./arg-reader.py @args.txt -animalbig hippo''',
fromfile_prefix_chars='@',
+ formatter_class=RawTextHelpFormatter,
)
parser.add_argument('-animalbig', action="store", dest="animalbig", |
39c4d50b08f92a5d76ac5864e13a3427e7dfd86a | app/accounts/tests/test_models.py | app/accounts/tests/test_models.py | from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from app.accounts.models import UserProfile
class UserProfileTest(TestCase):
"""Test UserProfile model"""
def setUp(self):
self.user = User.objects.create(username='frank', password='secret')
UserProfile.objects.create(user=self.user)
def test_user_can_only_have_one_profile_entry(self):
profile = UserProfile(user=self.user, custom_auth_id='134664323567')
self.assertRaises(IntegrityError, profile.save)
| from django.contrib.auth.models import User
from django.db import IntegrityError
from django.test import TestCase
from app.accounts.models import UserProfile
class UserProfileTest(TestCase):
"""Test UserProfile model"""
def setUp(self):
self.user = User.objects.create(username='frank', password='secret')
def test_user_can_only_have_one_profile_entry(self):
profile = UserProfile(user=self.user, custom_auth_id='134664323567')
self.assertRaises(IntegrityError, profile.save)
def test_profile_is_created_on_user_creation(self):
user_profile = UserProfile.objects.get(user=self.user)
self.assertEqual(user_profile.user, self.user)
| Add test for creating profile on user creation | Add test for creating profile on user creation
| Python | mit | teamtaverna/core | ---
+++
@@ -10,9 +10,11 @@
def setUp(self):
self.user = User.objects.create(username='frank', password='secret')
- UserProfile.objects.create(user=self.user)
def test_user_can_only_have_one_profile_entry(self):
profile = UserProfile(user=self.user, custom_auth_id='134664323567')
+ self.assertRaises(IntegrityError, profile.save)
- self.assertRaises(IntegrityError, profile.save)
+ def test_profile_is_created_on_user_creation(self):
+ user_profile = UserProfile.objects.get(user=self.user)
+ self.assertEqual(user_profile.user, self.user) |
4007508e10d730068e7f0a2ded0a7403525051a4 | checklisthq/checklisthq/urls.py | checklisthq/checklisthq/urls.py | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^users/new$', 'main.views.new_user', name="new_user"),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
url(r'^.*$', 'main.views.home', name='home'),
# url(r'^checklisthq/', include('checklisthq.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
| from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^users/new$', 'main.views.new_user', name="new_user"),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
# url(r'^checklisthq/', include('checklisthq.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^.*$', 'main.views.home', name='home'),
)
| Reorder URLs, catchall at the end | Reorder URLs, catchall at the end
| Python | agpl-3.0 | checklisthq/checklisthq.com,checklisthq/checklisthq.com | ---
+++
@@ -8,7 +8,6 @@
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^users/new$', 'main.views.new_user', name="new_user"),
url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
- url(r'^.*$', 'main.views.home', name='home'),
# url(r'^checklisthq/', include('checklisthq.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
@@ -16,4 +15,6 @@
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
+
+ url(r'^.*$', 'main.views.home', name='home'),
) |
265f36fb7fac426d662fbdebf29e8aad01e257d2 | flask_oauthlib/utils.py | flask_oauthlib/utils.py | # coding: utf-8
import base64
from flask import request, Response
from oauthlib.common import to_unicode, bytes_type
def extract_params():
"""Extract request params."""
uri = request.url
http_method = request.method
headers = dict(request.headers)
if 'wsgi.input' in headers:
del headers['wsgi.input']
if 'wsgi.errors' in headers:
del headers['wsgi.errors']
body = request.form.to_dict()
return uri, http_method, body, headers
def to_bytes(text, encoding='utf-8'):
"""Make sure text is bytes type."""
if not isinstance(text, bytes_type):
text = text.encode(encoding)
return text
def decode_base64(text, encoding='utf-8'):
"""Decode base64 string."""
text = to_bytes(text, encoding)
return to_unicode(base64.b64decode(text), encoding)
def create_response(headers, body, status):
"""Create response class for Flask."""
response = Response(body or '')
for k, v in headers.items():
response.headers[k] = v
response.status_code = status
return response
| # coding: utf-8
import base64
from flask import request, Response
from oauthlib.common import to_unicode, bytes_type
def extract_params():
"""Extract request params."""
uri = request.url
http_method = request.method
headers = dict(request.headers)
if 'wsgi.input' in headers:
del headers['wsgi.input']
if 'wsgi.errors' in headers:
del headers['wsgi.errors']
body = request.form.to_dict()
return uri, http_method, body, headers
def to_bytes(text, encoding='utf-8'):
"""Make sure text is bytes type."""
if not text:
return text
if not isinstance(text, bytes_type):
text = text.encode(encoding)
return text
def decode_base64(text, encoding='utf-8'):
"""Decode base64 string."""
text = to_bytes(text, encoding)
return to_unicode(base64.b64decode(text), encoding)
def create_response(headers, body, status):
"""Create response class for Flask."""
response = Response(body or '')
for k, v in headers.items():
response.headers[k] = v
response.status_code = status
return response
| Fix to_bytes when text is None | Fix to_bytes when text is None
| Python | bsd-3-clause | tonyseek/flask-oauthlib,auerj/flask-oauthlib,icook/flask-oauthlib,RealGeeks/flask-oauthlib,Fleurer/flask-oauthlib,CoreyHyllested/flask-oauthlib,huxuan/flask-oauthlib,landler/flask-oauthlib,lepture/flask-oauthlib,lepture/flask-oauthlib,tonyseek/flask-oauthlib,CommonsCloud/CommonsCloud-FlaskOAuthlib,brightforme/flask-oauthlib,stianpr/flask-oauthlib,adambard/flask-oauthlib,CommonsCloud/CommonsCloud-FlaskOAuthlib,stianpr/flask-oauthlib,cogniteev/flask-oauthlib,brightforme/flask-oauthlib,CoreyHyllested/flask-oauthlib,icook/flask-oauthlib,Fleurer/flask-oauthlib,RealGeeks/flask-oauthlib,kevin1024/flask-oauthlib,auerj/flask-oauthlib,huxuan/flask-oauthlib,cogniteev/flask-oauthlib,kevin1024/flask-oauthlib,Ryan-K/flask-oauthlib,landler/flask-oauthlib,Ryan-K/flask-oauthlib,PyBossa/flask-oauthlib,PyBossa/flask-oauthlib,adambard/flask-oauthlib | ---
+++
@@ -21,6 +21,8 @@
def to_bytes(text, encoding='utf-8'):
"""Make sure text is bytes type."""
+ if not text:
+ return text
if not isinstance(text, bytes_type):
text = text.encode(encoding)
return text |
c480ed20fd5b7c5d53b4f0112feed801cd99ef9c | tests/test_data_prep.py | tests/test_data_prep.py | import os
import pandas as pd
import numpy.testing as npt
from gypsy import DATA_DIR
from gypsy.data_prep import prep_standtable
def test_prep_standtable():
data_file_name = 'raw_standtable.csv'
plot_data = pd.read_csv(os.path.join(DATA_DIR, data_file_name))
expected_data_path = os.path.join(
DATA_DIR, 'output', 'dataprepped_standtable.csv'
)
result = prep_standtable(plot_data)
expected = pd.read_csv(expected_data_path, index_col=0)
assert isinstance(result, pd.DataFrame)
assert npt.assert_almost_equal(
expected.values, result.values, decimal=3
) is None
# regenerate output files
# result.to_csv(expected_data_path)
| import os
import pandas as pd
import numpy.testing as npt
from gypsy import DATA_DIR
from gypsy.data_prep import prep_standtable
def test_prep_standtable():
data_file_name = 'raw_standtable.csv'
plot_data = pd.read_csv(os.path.join(DATA_DIR, data_file_name))
expected_data_path = os.path.join(
DATA_DIR, 'output', 'dataprepped_standtable.csv'
)
result = prep_standtable(plot_data)
expected = pd.read_csv(expected_data_path, index_col=0)
assert isinstance(result, pd.DataFrame)
assert npt.assert_allclose(
expected.values, result.values,
rtol=0.01, atol=0.1,
equal_nan=True
) is None
# regenerate output files
# result.to_csv(expected_data_path)
| Use allclose for dataprep test | Use allclose for dataprep test
| Python | mit | tesera/pygypsy,tesera/pygypsy | ---
+++
@@ -17,8 +17,10 @@
expected = pd.read_csv(expected_data_path, index_col=0)
assert isinstance(result, pd.DataFrame)
- assert npt.assert_almost_equal(
- expected.values, result.values, decimal=3
+ assert npt.assert_allclose(
+ expected.values, result.values,
+ rtol=0.01, atol=0.1,
+ equal_nan=True
) is None
# regenerate output files |
07455e5821d21c988c7c5fcda9345e99355eb4e7 | redash/__init__.py | redash/__init__.py | import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=0, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers | import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
if redis_url.path:
redis_db = redis_url.path[1]
else:
redis_db = 0
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers | Use database number from redis url if available. | Use database number from redis url if available.
| Python | bsd-2-clause | chriszs/redash,imsally/redash,44px/redash,guaguadev/redash,denisov-vlad/redash,rockwotj/redash,44px/redash,rockwotj/redash,getredash/redash,ninneko/redash,akariv/redash,amino-data/redash,akariv/redash,imsally/redash,EverlyWell/redash,getredash/redash,easytaxibr/redash,M32Media/redash,vishesh92/redash,ninneko/redash,getredash/redash,easytaxibr/redash,crowdworks/redash,hudl/redash,ninneko/redash,denisov-vlad/redash,stefanseifert/redash,amino-data/redash,useabode/redash,guaguadev/redash,jmvasquez/redashtest,alexanderlz/redash,moritz9/redash,pubnative/redash,denisov-vlad/redash,imsally/redash,rockwotj/redash,pubnative/redash,vishesh92/redash,useabode/redash,crowdworks/redash,akariv/redash,44px/redash,easytaxibr/redash,getredash/redash,chriszs/redash,akariv/redash,guaguadev/redash,44px/redash,stefanseifert/redash,denisov-vlad/redash,amino-data/redash,moritz9/redash,M32Media/redash,crowdworks/redash,M32Media/redash,M32Media/redash,alexanderlz/redash,crowdworks/redash,jmvasquez/redashtest,stefanseifert/redash,denisov-vlad/redash,pubnative/redash,EverlyWell/redash,moritz9/redash,stefanseifert/redash,stefanseifert/redash,pubnative/redash,imsally/redash,EverlyWell/redash,akariv/redash,ninneko/redash,moritz9/redash,jmvasquez/redashtest,useabode/redash,vishesh92/redash,chriszs/redash,getredash/redash,jmvasquez/redashtest,useabode/redash,easytaxibr/redash,ninneko/redash,amino-data/redash,pubnative/redash,hudl/redash,EverlyWell/redash,guaguadev/redash,jmvasquez/redashtest,easytaxibr/redash,hudl/redash,vishesh92/redash,chriszs/redash,rockwotj/redash,alexanderlz/redash,hudl/redash,alexanderlz/redash,guaguadev/redash | ---
+++
@@ -33,7 +33,11 @@
redis_url = urlparse.urlparse(settings.REDIS_URL)
-redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=0, password=redis_url.password)
+if redis_url.path:
+ redis_db = redis_url.path[1]
+else:
+ redis_db = 0
+redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db) |
4139dafb967c61ac8d10b3b9fa8d64c8c079bfa2 | scripts/png2raw.py | scripts/png2raw.py | #!/usr/bin/env python
import Image
import logging
import sys
def main(argv):
pngFileName = sys.argv[1]
baseFileName, _ = pngFileName.rsplit('.')
rawFileName = '%s.raw' % baseFileName
palFileName = '%s.pal' % baseFileName
image = Image.open(pngFileName)
with open(palFileName, 'w') as palFile:
pal = map(chr, image.getpalette())
palFile.write("".join(pal))
with open(rawFileName, 'w') as rawFile:
w, h = image.size
for y in range(h):
for x in range(w):
pixel = image.getpixel((x,y))
rawFile.write(chr(pixel))
if __name__ == '__main__':
FORMAT = '%(levelname)s: %(message)s'
logging.basicConfig(format=FORMAT, level=logging.DEBUG)
main(sys.argv)
| #!/usr/bin/env python
import Image
import argparse
import os
def main():
parser = argparse.ArgumentParser(
description='Converts input image to raw image and palette data.')
parser.add_argument('-f', '--force', action='store_true',
help='If output files exist, the tool will overwrite them.')
parser.add_argument('input', metavar='INPUT', type=str,
help='Input image filename.')
parser.add_argument('output', metavar='OUTPUT', type=str,
help='Output files basename (without extension).')
args = parser.parse_args()
inputPath = os.path.abspath(args.input)
outputPath = os.path.abspath(args.output)
rawFilePath = '%s.raw' % outputPath
palFilePath = '%s.pal' % outputPath
if not os.path.isfile(inputPath):
raise SystemExit('Input file does not exists!')
if any(map(os.path.isfile, [rawFilePath, palFilePath])) and not args.force:
raise SystemExit('Will not overwrite output files!')
try:
image = Image.open(inputPath)
except IOError as ex:
raise SystemExit('Error: %s.' % ex)
else:
with open(palFilePath, 'w') as palFile:
pal = map(chr, image.getpalette())
palFile.write("".join(pal))
with open(rawFilePath, 'w') as rawFile:
w, h = image.size
for y in range(h):
for x in range(w):
pixel = image.getpixel((x,y))
rawFile.write(chr(pixel))
if __name__ == '__main__':
main()
| Add cmdline options parser and a sanity check. | Add cmdline options parser and a sanity check.
| Python | artistic-2.0 | cahirwpz/demoscene,cahirwpz/demoscene,cahirwpz/demoscene,cahirwpz/demoscene | ---
+++
@@ -1,30 +1,47 @@
#!/usr/bin/env python
import Image
-import logging
-import sys
+import argparse
+import os
-def main(argv):
- pngFileName = sys.argv[1]
- baseFileName, _ = pngFileName.rsplit('.')
+def main():
+ parser = argparse.ArgumentParser(
+ description='Converts input image to raw image and palette data.')
+ parser.add_argument('-f', '--force', action='store_true',
+ help='If output files exist, the tool will overwrite them.')
+ parser.add_argument('input', metavar='INPUT', type=str,
+ help='Input image filename.')
+ parser.add_argument('output', metavar='OUTPUT', type=str,
+ help='Output files basename (without extension).')
+ args = parser.parse_args()
- rawFileName = '%s.raw' % baseFileName
- palFileName = '%s.pal' % baseFileName
+ inputPath = os.path.abspath(args.input)
+ outputPath = os.path.abspath(args.output)
- image = Image.open(pngFileName)
+ rawFilePath = '%s.raw' % outputPath
+ palFilePath = '%s.pal' % outputPath
- with open(palFileName, 'w') as palFile:
- pal = map(chr, image.getpalette())
- palFile.write("".join(pal))
+ if not os.path.isfile(inputPath):
+ raise SystemExit('Input file does not exists!')
- with open(rawFileName, 'w') as rawFile:
- w, h = image.size
- for y in range(h):
- for x in range(w):
- pixel = image.getpixel((x,y))
- rawFile.write(chr(pixel))
+ if any(map(os.path.isfile, [rawFilePath, palFilePath])) and not args.force:
+ raise SystemExit('Will not overwrite output files!')
+
+ try:
+ image = Image.open(inputPath)
+ except IOError as ex:
+ raise SystemExit('Error: %s.' % ex)
+ else:
+ with open(palFilePath, 'w') as palFile:
+ pal = map(chr, image.getpalette())
+ palFile.write("".join(pal))
+
+ with open(rawFilePath, 'w') as rawFile:
+ w, h = image.size
+ for y in range(h):
+ for x in range(w):
+ pixel = image.getpixel((x,y))
+ rawFile.write(chr(pixel))
if __name__ == '__main__':
- FORMAT = '%(levelname)s: %(message)s'
- logging.basicConfig(format=FORMAT, level=logging.DEBUG)
- main(sys.argv)
+ main() |
3fe8498b8599238fd18b8f96edff438e1f569f48 | sheldon/storage.py | sheldon/storage.py | # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connection Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
| # -*- coding: utf-8 -*-
"""
Interface to Redis-storage.
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from .utils import logger
# We will catch all import exceptions in bot.py
from redis import StrictRedis
class Storage:
def __init__(self, bot):
"""
Create new storage for bot
:param bot: Bot object
:return:
"""
self.bot = bot
# Connect to Redis.
# If we had problems with Redis - just set self.redis to None.
# Not redis-required modules must work without Redis.
try:
self.redis = StrictRedis(host=bot.config.get('SHELDON_REDIS_HOST',
'localhost'),
port=int(
bot.config.get('SHELDON_REDIS_PORT',
'6379')
),
db=int(
bot.config.get('SHELDON_REDIS_DB', '0')
)
)
except Exception as error:
logger.error_log_message('Error while connecting Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
| Fix typo in redis error message | Fix typo in redis error message
| Python | mit | lises/sheldon | ---
+++
@@ -40,7 +40,7 @@
)
)
except Exception as error:
- logger.error_log_message('Error while connection Redis:')
+ logger.error_log_message('Error while connecting Redis:')
logger.error_log_message(str(error.__traceback__))
self.redis = None
|
f1b1542b28b83f7adabbadc7e2932ed8b42aa8c3 | main/_config.py | main/_config.py | import os
import inspect
# Flask
DEBUG = True
# Amazon S3 Settings
AWS_KEY = ''
AWS_SECRET_KEY = ''
AWS_BUCKET = 'www.vpr.net'
AWS_DIRECTORY = 'sandbox/app/'
SOUNDCLOUD_API = {
"client_id": "",
"client_secret": "",
"username": "",
"password": ""}
SOUNDCLOUD_NUM_TRACKS = 5
# Cache Settings (units in seconds)
STATIC_EXPIRES = 60 * 24 * 3600
HTML_EXPIRES = 3600
# Frozen Flask
FREEZER_DEFAULT_MIMETYPE = 'text/html'
FREEZER_IGNORE_MIMETYPE_WARNINGS = True
FREEZER_DESTINATION = 'build'
FREEZER_BASE_URL = 'http://%s/%s' % (AWS_BUCKET, AWS_DIRECTORY)
FREEZER_STATIC_IGNORE = ['Gruntfile*', 'node_modules', 'package.json',
'dev', '.sass-cache']
WEBFACTION_PATH = AWS_DIRECTORY
ABSOLUTE_PATH = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/'
| import os
import inspect
# Flask
DEBUG = True
# Amazon S3 Settings
AWS_KEY = ''
AWS_SECRET_KEY = ''
AWS_BUCKET = 'www.vpr.net'
AWS_DIRECTORY = 'sandbox/app/'
SOUNDCLOUD_API = {
"client_id": "",
"client_secret": "",
"username": "",
"password": ""}
SOUNDCLOUD_NUM_TRACKS = 10
# Cache Settings (units in seconds)
STATIC_EXPIRES = 60 * 24 * 3600
HTML_EXPIRES = 3600
# Frozen Flask
FREEZER_DEFAULT_MIMETYPE = 'text/html'
FREEZER_IGNORE_MIMETYPE_WARNINGS = True
FREEZER_DESTINATION = 'build'
FREEZER_BASE_URL = 'http://%s/%s' % (AWS_BUCKET, AWS_DIRECTORY)
FREEZER_STATIC_IGNORE = ['Gruntfile*', 'node_modules', 'package.json',
'dev', '.sass-cache']
WEBFACTION_PATH = AWS_DIRECTORY
ABSOLUTE_PATH = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/'
| Increase number of SoundCloud episodes pulled in | Increase number of SoundCloud episodes pulled in
This is related to the fact that unpublished episodes still pull through. | Python | apache-2.0 | vprnet/EOTS-iframe-widget,vprnet/EOTS-iframe-widget,vprnet/EOTS-iframe-widget | ---
+++
@@ -15,7 +15,7 @@
"client_secret": "",
"username": "",
"password": ""}
-SOUNDCLOUD_NUM_TRACKS = 5
+SOUNDCLOUD_NUM_TRACKS = 10
# Cache Settings (units in seconds)
STATIC_EXPIRES = 60 * 24 * 3600 |
3c4f7906f98e6dfb9afe6993bee993ed05b969f3 | apps/splash/views.py | apps/splash/views.py | import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
return render(request, 'splash/base.html', {'splash_year': splash_year })
| import datetime
from django.shortcuts import render
from apps.splash.models import SplashEvent, SplashYear
def index(request):
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
return render(request, 'splash/base.html', {'splash_year': splash_year })
# And I'm really sorry for this ...
def _merge_events(splash_events):
events = []
for event in splash_events:
if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
events[-1].append(event)
else:
events.append([event])
return events
| Add method for merging duplicated events | Add method for merging duplicated events
| Python | mit | dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4 | ---
+++
@@ -7,3 +7,16 @@
# I'm really sorry ...
splash_year = SplashYear.objects.get(start_date__gt=str(datetime.date.today() - datetime.timedelta(180)))
return render(request, 'splash/base.html', {'splash_year': splash_year })
+
+
+# And I'm really sorry for this ...
+def _merge_events(splash_events):
+ events = []
+
+ for event in splash_events:
+ if len(events) > 0 and event.start_time.strftime('%d-%m') == events[-1][0].start_time.strftime('%d-%m'):
+ events[-1].append(event)
+ else:
+ events.append([event])
+
+ return events |
7b108ec9392c70113a5f5bf04e104de1fe123815 | autosort/wrapping.py | autosort/wrapping.py | def _dynamic_wrap(items, limit):
scores, trace = [0], []
for j in range(len(items)):
best, psum, index = 0, limit, -1
for i in reversed(range(j + 1)):
psum -= items[i]
score = scores[i] + psum ** 2
if i == j or score < best and psum >= 0:
best = score
index = i
scores.append(best)
trace.append(index)
return _build_indices(trace)
def _build_indices(trace):
indices, index = [], len(trace) - 1
while index >= 0:
indices.append((trace[index], index + 1))
index = trace[index] - 1
return indices[::-1]
| def _dynamic_wrap(items, limit):
scores, trace = [0], []
for j in range(len(items)):
best, psum, index = float('inf'), limit, -1
for i in reversed(range(j + 1)):
psum -= items[i]
score = scores[i] + psum ** 2
if score < best and (psum >= 0 or i == j):
best = score
index = i
scores.append(best)
trace.append(index)
return _build_indices(trace)
def _build_indices(trace):
indices, index = [], len(trace) - 1
while index >= 0:
indices.append((trace[index], index + 1))
index = trace[index] - 1
return indices[::-1]
| Make it clearer that exactly one item allows psum < 0 | Make it clearer that exactly one item allows psum < 0
| Python | mit | fbergroth/autosort | ---
+++
@@ -1,11 +1,11 @@
def _dynamic_wrap(items, limit):
scores, trace = [0], []
for j in range(len(items)):
- best, psum, index = 0, limit, -1
+ best, psum, index = float('inf'), limit, -1
for i in reversed(range(j + 1)):
psum -= items[i]
score = scores[i] + psum ** 2
- if i == j or score < best and psum >= 0:
+ if score < best and (psum >= 0 or i == j):
best = score
index = i
|
e0797f6dbefea651420f474940963b470a0931fd | test/functional/test_framework/txtools.py | test/functional/test_framework/txtools.py | from .cdefs import MIN_TX_SIZE, MAX_TXOUT_PUBKEY_SCRIPT
from .mininode import CTransaction, FromHex, ToHex, CTxOut
from .script import OP_RETURN, CScript
import random
from binascii import hexlify, unhexlify
# Pad outputs until it reaches at least min_size
def pad_tx(tx, min_size=None):
if min_size is None:
min_size = MIN_TX_SIZE
curr_size = len(tx.serialize())
while curr_size < min_size:
# txout.value + txout.pk_script bytes + op_return
extra_bytes = 8 + 1 + 1
padding_len = max(0, min_size - curr_size - extra_bytes)
padding_len = min(padding_len, MAX_TXOUT_PUBKEY_SCRIPT)
if padding_len == 0:
tx.vout.append(CTxOut(0, CScript([OP_RETURN])))
else:
padding = random.randrange(
1 << 8 * padding_len - 1, 1 << 8 * padding_len)
tx.vout.append(
CTxOut(0, CScript([padding, OP_RETURN])))
curr_size = len(tx.serialize())
tx.rehash()
# Pad outputs until it reaches at least min_size
def pad_raw_tx(rawtx_hex, min_size=None):
tx = CTransaction()
FromHex(tx, rawtx_hex)
pad_tx(tx, min_size)
return ToHex(tx)
| from .cdefs import MIN_TX_SIZE, MAX_TXOUT_PUBKEY_SCRIPT
from .mininode import CTransaction, FromHex, ToHex, CTxOut
from .script import OP_RETURN, CScript
import random
from binascii import hexlify, unhexlify
# Pad outputs until it reaches at least min_size
def pad_tx(tx, min_size=None):
if min_size is None:
min_size = MIN_TX_SIZE
curr_size = len(tx.serialize())
while curr_size < min_size:
# txout.value + txout.pk_script bytes + op_return
extra_bytes = 8 + 1 + 1
padding_len = max(0, min_size - curr_size - extra_bytes)
padding_len = min(padding_len, MAX_TXOUT_PUBKEY_SCRIPT)
if padding_len == 0:
tx.vout.append(CTxOut(0, CScript([OP_RETURN])))
else:
padding = random.randrange(
1 << 8 * padding_len - 2, 1 << 8 * padding_len - 1)
tx.vout.append(
CTxOut(0, CScript([padding, OP_RETURN])))
curr_size = len(tx.serialize())
tx.rehash()
# Pad outputs until it reaches at least min_size
def pad_raw_tx(rawtx_hex, min_size=None):
tx = CTransaction()
FromHex(tx, rawtx_hex)
pad_tx(tx, min_size)
return ToHex(tx)
| Fix pad_tx off by one error + nits | Fix pad_tx off by one error + nits
Summary: See title
Test Plan: test_runner.py
Reviewers: deadalnix, schancel, #bitcoin_abc
Reviewed By: schancel, #bitcoin_abc
Subscribers: teamcity
Differential Revision: https://reviews.bitcoinabc.org/D2096
| Python | mit | Bitcoin-ABC/bitcoin-abc,cculianu/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,cculianu/bitcoin-abc,cculianu/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,cculianu/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,cculianu/bitcoin-abc,cculianu/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,cculianu/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,cculianu/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,ftrader-bitcoinabc/bitcoin-abc | ---
+++
@@ -23,18 +23,17 @@
tx.vout.append(CTxOut(0, CScript([OP_RETURN])))
else:
padding = random.randrange(
- 1 << 8 * padding_len - 1, 1 << 8 * padding_len)
+ 1 << 8 * padding_len - 2, 1 << 8 * padding_len - 1)
tx.vout.append(
CTxOut(0, CScript([padding, OP_RETURN])))
curr_size = len(tx.serialize())
tx.rehash()
-# Pad outputs until it reaches at least min_size
+# Pad outputs until it reaches at least min_size
def pad_raw_tx(rawtx_hex, min_size=None):
-
tx = CTransaction()
FromHex(tx, rawtx_hex)
pad_tx(tx, min_size) |
bdcecb3c96cef5b663b1ada22efa952b0882f1f0 | spacy/tests/regression/test_issue600.py | spacy/tests/regression/test_issue600.py | from __future__ import unicode_literals
from ...tokens import Doc
from ...vocab import Vocab
def test_issue600():
    # Regression test for issue #600: assigning a tag string that exists
    # in the vocab's tag_map must not raise.
    doc = Doc(Vocab(tag_map={'NN': {'pos': 'NOUN'}}), words=['hello'])
    doc[0].tag_ = u'NN'
| from __future__ import unicode_literals
from ...tokens import Doc
from ...vocab import Vocab
from ...attrs import POS
def test_issue600():
    # Regression test for issue #600: assigning a tag string that exists
    # in the vocab's tag_map must not raise.
    doc = Doc(Vocab(tag_map={'NN': {'pos': 'NOUN'}}), words=['hello'])
    doc[0].tag_ = u'NN'
| Add import in regression test | Add import in regression test
| Python | mit | Gregory-Howard/spaCy,spacy-io/spaCy,raphael0202/spaCy,explosion/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,banglakit/spaCy,banglakit/spaCy,raphael0202/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,oroszgy/spaCy.hu,recognai/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,raphael0202/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,spacy-io/spaCy,raphael0202/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,aikramer2/spaCy,banglakit/spaCy,aikramer2/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,banglakit/spaCy,aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,banglakit/spaCy | ---
+++
@@ -1,6 +1,7 @@
from __future__ import unicode_literals
from ...tokens import Doc
from ...vocab import Vocab
+from ...attrs import POS
def test_issue600(): |
1f3183acbe50df32d76d1cc0cb71b4cd9afdaa79 | controller/__init__.py | controller/__init__.py | # -*- coding: utf-8 -*-
import sys
__version__ = '0.6.0'
FRAMEWORK_NAME = 'RAPyDo'
# PROJECT_YAML_SPECSDIR = 'specs'
COMPOSE_ENVIRONMENT_FILE = '.env'
SUBMODULES_DIR = 'submodules'
PLACEHOLDER = '#@$%-REPLACE-#@%$-ME-#@$%'
##################
# NOTE: telling the app if testing or not
# http://j.mp/2uifoza
TESTING = hasattr(sys, '_called_from_test')
# -*- coding: utf-8 -*-
import sys

# Package version string.
__version__ = '0.5.1'

# Human-readable name of the framework this controller drives.
FRAMEWORK_NAME = 'RAPyDo'
# PROJECT_YAML_SPECSDIR = 'specs'
# Env-var file name — presumably consumed by docker-compose (confirm).
COMPOSE_ENVIRONMENT_FILE = '.env'
# Directory where project git submodules are expected to live.
SUBMODULES_DIR = 'submodules'
# Sentinel marking configuration values that must be replaced by the user.
PLACEHOLDER = '#@$%-REPLACE-#@%$-ME-#@$%'

##################
# NOTE: telling the app if testing or not
# http://j.mp/2uifoza
TESTING = hasattr(sys, '_called_from_test')
| Fix version with last package | Fix version with last package
| Python | mit | rapydo/do | ---
+++
@@ -2,7 +2,7 @@
import sys
-__version__ = '0.6.0'
+__version__ = '0.5.1'
FRAMEWORK_NAME = 'RAPyDo'
# PROJECT_YAML_SPECSDIR = 'specs' |
ff13cc4b7ef29c4454abb41b8e9a525d12c9ff7d | tailorscad/tests/test_arg_parser.py | tailorscad/tests/test_arg_parser.py |
import unittest
from tailorscad.arg_parser import parse_args
class TestArgParser(unittest.TestCase):
    """Tests for tailorscad.arg_parser.parse_args.

    NOTE(review): these assertions assume parse_args returns a plain list
    of recognized option values — confirm against arg_parser itself.
    """

    def test_parse_args_none(self):
        # Empty argv: result should be falsy.
        args = []
        argv = []

        args = parse_args(argv)

        self.assertFalse(args)

    def test_parse_args_inknown(self):
        # Only an unrecognized flag ("inknown" is presumably a typo for
        # "unknown"): result should be falsy.
        args = []
        argv = ['-a', 'word']

        args = parse_args(argv)

        self.assertFalse(args)

    def test_parse_args_known(self):
        # Recognized -c flag: its value is returned.
        args = []
        argv = ['-c', 'test']

        args = parse_args(argv)

        self.assertTrue(args)
        self.assertEqual(args, ['test'])

    def test_parse_args_unkown_and_known(self):
        # Mixed unknown and known flags: only the known value survives.
        args = []
        argv = ['-a', 'word', '-c', 'test']

        args = parse_args(argv)

        self.assertTrue(args)
        self.assertEqual(args, ['test'])
|
import unittest
from tailorscad.arg_parser import parse_args
class TestArgParser(unittest.TestCase):
    """Tests for tailorscad.arg_parser.parse_args.

    parse_args is expected to return an argparse-style namespace whose
    ``config`` attribute holds the value of the ``-c`` option (falsy when
    the option is absent).
    """
    # Fixed defects vs. previous revision: dead ``args = []``
    # pre-assignments removed, throwaway ``argv`` locals inlined, and the
    # typo'd method names (inknown/unkown) corrected.

    def test_parse_args_none(self):
        """An empty argv leaves config unset."""
        args = parse_args([])
        self.assertFalse(args.config)

    def test_parse_args_unknown(self):
        """An unrecognized flag alone leaves config unset."""
        args = parse_args(['-a', 'word'])
        self.assertFalse(args.config)

    def test_parse_args_known(self):
        """The -c option populates the config attribute."""
        args = parse_args(['-c', 'test'])
        self.assertTrue(args)
        self.assertEqual(args.config, 'test')

    def test_parse_args_unknown_and_known(self):
        """Unknown flags do not interfere with recognized ones."""
        args = parse_args(['-a', 'word', '-c', 'test'])
        self.assertTrue(args)
        self.assertEqual(args.config, 'test')
| Fix unit tests for arg_parser | Fix unit tests for arg_parser
| Python | mit | savorywatt/tailorSCAD | ---
+++
@@ -13,7 +13,7 @@
args = parse_args(argv)
- self.assertFalse(args)
+ self.assertFalse(args.config)
def test_parse_args_inknown(self):
@@ -22,7 +22,7 @@
args = parse_args(argv)
- self.assertFalse(args)
+ self.assertFalse(args.config)
def test_parse_args_known(self):
@@ -32,7 +32,7 @@
args = parse_args(argv)
self.assertTrue(args)
- self.assertEqual(args, ['test'])
+ self.assertEqual(args.config, 'test')
def test_parse_args_unkown_and_known(self):
args = []
@@ -41,4 +41,4 @@
args = parse_args(argv)
self.assertTrue(args)
- self.assertEqual(args, ['test'])
+ self.assertEqual(args.config, 'test') |
ccbd25f196453f4c7b61fa4e69d192d7b96595e2 | remo/remozilla/tests/__init__.py | remo/remozilla/tests/__init__.py | import datetime
from django.utils.timezone import utc
import factory
from factory import fuzzy
from remo.profiles.tests import UserFactory
from remo.remozilla.models import Bug
from remo.remozilla.tasks import COMPONENTS
CHANGE_DT = datetime.datetime(2012, 1, 1, tzinfo=utc)
CREATION_DT = datetime.datetime(2011, 1, 1, tzinfo=utc)
DUE_DT = datetime.datetime(2013, 1, 1, tzinfo=utc)
RESOLUTION = ['FIXED', 'INVALID', 'WONTFIX', 'DUPLICATE', 'WORKSFORME',
'INCOMPLETE']
STATUS = ['UNCONFIRMED', 'NEW', 'ASSIGNED', 'REOPENED', 'READY', 'RESOLVED',
'VERIFIED']
class BugFactory(factory.django.DjangoModelFactory):
    """factory_boy factory producing remo.remozilla ``Bug`` model
    instances with randomized-but-plausible field values."""
    FACTORY_FOR = Bug  # model class this factory builds

    bug_id = fuzzy.FuzzyInteger(50000, 200000)
    # Creation time precedes last-change time by construction of the
    # CREATION_DT < CHANGE_DT < DUE_DT interval bounds.
    bug_creation_time = fuzzy.FuzzyDateTime(CREATION_DT, CHANGE_DT)
    bug_last_change_time = fuzzy.FuzzyDateTime(CHANGE_DT, DUE_DT)
    creator = factory.SubFactory(UserFactory)
    assigned_to = factory.SubFactory(UserFactory)
    component = fuzzy.FuzzyChoice(COMPONENTS)
    summary = 'Bug summary'
    whiteboard = 'Bug whiteboard'
    resolution = fuzzy.FuzzyChoice(RESOLUTION)
    status = fuzzy.FuzzyChoice(STATUS)

    @factory.post_generation
    def add_cc_users(self, create, extracted, **kwargs):
        # Post-generation hook: BugFactory(add_cc_users=[u1, u2]) adds the
        # given users to the bug's ``cc`` relation; no-op for build().
        if not create:
            return
        if extracted:
            for user in extracted:
                self.cc.add(user)
| import datetime
from django.utils.timezone import utc
import factory
from factory import fuzzy
from remo.profiles.tests import UserFactory
from remo.remozilla.models import Bug
from remo.remozilla.tasks import COMPONENTS
CHANGE_DT = datetime.datetime(2012, 1, 1, tzinfo=utc)
CREATION_DT = datetime.datetime(2011, 1, 1, tzinfo=utc)
DUE_DT = datetime.datetime(2013, 1, 1, tzinfo=utc)
class BugFactory(factory.django.DjangoModelFactory):
    """factory_boy factory producing remo.remozilla ``Bug`` model
    instances with randomized-but-plausible field values."""
    FACTORY_FOR = Bug  # model class this factory builds

    bug_id = fuzzy.FuzzyInteger(50000, 200000)
    # Creation time precedes last-change time by construction of the
    # CREATION_DT < CHANGE_DT < DUE_DT interval bounds.
    bug_creation_time = fuzzy.FuzzyDateTime(CREATION_DT, CHANGE_DT)
    bug_last_change_time = fuzzy.FuzzyDateTime(CHANGE_DT, DUE_DT)
    creator = factory.SubFactory(UserFactory)
    assigned_to = factory.SubFactory(UserFactory)
    component = fuzzy.FuzzyChoice(COMPONENTS)
    summary = 'Bug summary'
    whiteboard = 'Bug whiteboard'

    @factory.post_generation
    def add_cc_users(self, create, extracted, **kwargs):
        # Post-generation hook: BugFactory(add_cc_users=[u1, u2]) adds the
        # given users to the bug's ``cc`` relation; no-op for build().
        if not create:
            return
        if extracted:
            for user in extracted:
                self.cc.add(user)
| Add default RESOLUTION and STATUS in BugFactory. | Add default RESOLUTION and STATUS in BugFactory.
* Fixes failing tests.
| Python | bsd-3-clause | tsmrachel/remo,johngian/remo,mozilla/remo,tsmrachel/remo,flamingspaz/remo,abdullah2891/remo,johngian/remo,chirilo/remo,akatsoulas/remo,flamingspaz/remo,Mte90/remo,mozilla/remo,tsmrachel/remo,Mte90/remo,johngian/remo,chirilo/remo,abdullah2891/remo,johngian/remo,akatsoulas/remo,mozilla/remo,abdullah2891/remo,abdullah2891/remo,chirilo/remo,akatsoulas/remo,chirilo/remo,Mte90/remo,Mte90/remo,akatsoulas/remo,tsmrachel/remo,flamingspaz/remo,mozilla/remo,flamingspaz/remo | ---
+++
@@ -12,10 +12,6 @@
CHANGE_DT = datetime.datetime(2012, 1, 1, tzinfo=utc)
CREATION_DT = datetime.datetime(2011, 1, 1, tzinfo=utc)
DUE_DT = datetime.datetime(2013, 1, 1, tzinfo=utc)
-RESOLUTION = ['FIXED', 'INVALID', 'WONTFIX', 'DUPLICATE', 'WORKSFORME',
- 'INCOMPLETE']
-STATUS = ['UNCONFIRMED', 'NEW', 'ASSIGNED', 'REOPENED', 'READY', 'RESOLVED',
- 'VERIFIED']
class BugFactory(factory.django.DjangoModelFactory):
@@ -29,8 +25,6 @@
component = fuzzy.FuzzyChoice(COMPONENTS)
summary = 'Bug summary'
whiteboard = 'Bug whiteboard'
- resolution = fuzzy.FuzzyChoice(RESOLUTION)
- status = fuzzy.FuzzyChoice(STATUS)
@factory.post_generation
def add_cc_users(self, create, extracted, **kwargs): |
f17310e0fcf5d7ea7ceab2b9243f106eb1222b69 | desertbot/datastore.py | desertbot/datastore.py | import json
import os
class DataStore(object):
    """Simple JSON-file-backed data store.

    The in-memory state lives in ``self.data`` (a dict) and is persisted
    to ``storagePath`` as pretty-printed JSON.
    """

    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        # Start with an empty dict (not None) so self.data is always
        # usable, even if load() is interrupted or later refactored.
        self.data = {}
        self.load()

    def load(self):
        """Populate self.data from disk, creating the file if missing."""
        if not os.path.exists(self.storagePath):
            self.data = {}
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        """Persist self.data atomically: write a temp file, then swap it in."""
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            json.dump(self.data, storageFile, indent=4)
        # os.replace overwrites an existing destination on all platforms;
        # os.rename raises on Windows once storagePath already exists,
        # i.e. on every save after the first.
        os.replace(tmpFile, self.storagePath)
| import json
import os
class DataStore(object):
    """JSON-file-backed, dict-like data store.

    State lives in ``self.data`` and is persisted to ``storagePath`` as
    pretty-printed JSON.  len(), iteration (over keys) and subscript
    access mirror the underlying dict.
    """

    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        """Populate self.data from disk, creating the file if missing."""
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        """Persist self.data atomically: write a temp file, then swap it in."""
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            json.dump(self.data, storageFile, indent=4)
        # os.replace overwrites an existing destination on all platforms;
        # os.rename raises on Windows once storagePath already exists,
        # i.e. on every save after the first.
        os.replace(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]
| Allow using DataStore class as if dict | Allow using DataStore class as if dict
| Python | mit | DesertBot/DesertBot | ---
+++
@@ -5,12 +5,11 @@
class DataStore(object):
def __init__(self, storagePath="desertbot_data.json"):
self.storagePath = storagePath
- self.data = None
+ self.data = {}
self.load()
def load(self):
if not os.path.exists(self.storagePath):
- self.data = {}
self.save()
return
with open(self.storagePath) as storageFile:
@@ -21,3 +20,12 @@
with open(tmpFile, "w") as storageFile:
storageFile.write(json.dumps(self.data, indent=4))
os.rename(tmpFile, self.storagePath)
+
+ def __len__(self):
+ return len(self.data)
+
+ def __iter__(self):
+ return iter(self.data)
+
+ def __getitem__(self, item):
+ return self.data[item] |
4ef159ae6d45bc546f1c84b57416fc2b87eecc33 | thrift/test/py/adapter_for_tests.py | thrift/test/py/adapter_for_tests.py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict
from .adapter_bar.ttypes import Bar
class AdapterTestStructToDict:
    """Thrift adapter that maps a Bar struct to a plain dict of its
    populated fields and back."""
    # Adapted Python-side type of the value.
    Type = Dict[str, int]

    @staticmethod
    def from_thrift(thrift_value):
        # Keep only fields that are actually set (non-None) so the dict
        # round-trips cleanly through Bar(**d) in to_thrift.
        return {k: v for k, v in thrift_value.__dict__.items() if v is not None}

    @staticmethod
    def to_thrift(py_value):
        # Rebuild the thrift struct from a field-name -> value mapping.
        return Bar(**py_value)
| # Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict
from .adapter_bar.ttypes import Bar
class AdapterTestStructToDict:
    """Thrift adapter converting a Bar struct to/from a plain dict of
    its populated fields."""

    # Adapted Python-side type of the value.
    Type = Dict[str, int]

    @staticmethod
    def from_thrift(thrift_value):
        """Return the struct's set (non-None) fields as a dict."""
        populated = {}
        for name, value in vars(thrift_value).items():
            if value is not None:
                populated[name] = value
        return populated

    @staticmethod
    def to_thrift(py_value):
        """Reconstruct a Bar struct from a field-name -> value mapping."""
        return Bar(**py_value)
| Convert type check targets in thrift/test to use configuration | Convert type check targets in thrift/test to use configuration
Summary:
Migrating buck integration to use configurations.
For more information about this migration, please see: https://fb.workplace.com/groups/295311271085134/permalink/552700215346237/
Reviewed By: dkgi
Differential Revision: D30708385
fbshipit-source-id: 4d65f711d129dc87f1b87aad342783f09dca0d2d
| Python | apache-2.0 | facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift | ---
+++
@@ -21,9 +21,13 @@
Type = Dict[str, int]
@staticmethod
+ # pyre-fixme[3]: Return type must be annotated.
+ # pyre-fixme[2]: Parameter must be annotated.
def from_thrift(thrift_value):
return {k: v for k, v in thrift_value.__dict__.items() if v is not None}
@staticmethod
+ # pyre-fixme[3]: Return type must be annotated.
+ # pyre-fixme[2]: Parameter must be annotated.
def to_thrift(py_value):
return Bar(**py_value) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.