commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
d8d38a24b6f6e12e2ee1a54d5a8900bd240aea18 | flickr_api/_version.py | flickr_api/_version.py | """ Version module.
Allows to define the version number uniquely.
"""
__version__ = "0.5dev"
| """ Version module.
Allows to define the version number uniquely.
"""
__version__ = "0.5"
| Set the version number for the v0.5 release | Set the version number for the v0.5 release | Python | bsd-3-clause | alexis-mignon/python-flickr-api,bryndin/tornado-flickr-api,alexis-mignon/python-flickr-api | ---
+++
@@ -3,4 +3,4 @@
Allows to define the version number uniquely.
"""
-__version__ = "0.5dev"
+__version__ = "0.5" |
1fa57ad194735cc7f6b4170d2c296d9c8b3ce4c3 | website_analytics_piwik/__openerp__.py | website_analytics_piwik/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Piwik analytics",
"version": "1.0",
"author": "Therp BV",
"license": "AGPL-3",
"category": "Website",
"summary": "Track website users using piwik",
"depends": [
'website',
],
"data": [
"views/website_config_settings.xml",
"views/website.xml",
'views/templates.xml',
],
"auto_install": False,
"installable": True,
"application": False,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Piwik analytics",
"version": "1.0",
"author": "Therp BV,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Website",
"summary": "Track website users using piwik",
"depends": [
'website',
],
"data": [
"views/website_config_settings.xml",
"views/website.xml",
'views/templates.xml',
],
"auto_install": False,
"installable": True,
"application": False,
}
| Add Odoo Community Association (OCA) in authors | Add Odoo Community Association (OCA) in authors
| Python | agpl-3.0 | incaser/website,pedrobaeza/website,Benniphx/website,LasLabs/website,Antiun/website,acsone/website,open-synergy/website,hbrunn/website,BT-jmichaud/website,gfcapalbo/website,brain-tec/website,sergio-incaser/website,nicolas-petit/website,syci/website-odoo,xpansa/website,open-synergy/website,Yajo/website,nicolas-petit/website,acsone/website,Tecnativa/website,syci/website-odoo,AuyaJackie/website,eLBati/website,brain-tec/website,khaeusler/website,kaerdsar/website,BT-fgarbely/website,BT-jmichaud/website,xpansa/website,LasLabs/website,nuobit/website,RoelAdriaans-B-informed/website,hbrunn/website,open-synergy/website,Rona111/website,pedrobaeza/website,RoelAdriaans-B-informed/website,Rona111/website,Tecnativa/website,xpansa/website,Rona111/website,LasLabs/website,Antiun/website,nuobit/website,gfcapalbo/website,BT-ojossen/website,alanljj/oca_website,initOS/website,Tecnativa/website,brain-tec/website,BT-jmichaud/website,seb-elico/website,StefanRijnhart/website,BT-fgarbely/website,acsone/website,Yajo/website,RoelAdriaans-B-informed/website,eLBati/website,kaerdsar/website,alanljj/oca_website,pedrobaeza/website,syci/website-odoo,JayVora-SerpentCS/website,brain-tec/website,eLBati/website,LasLabs/website,BT-ojossen/website,AuyaJackie/website,khaeusler/website,seb-elico/website,kaerdsar/website,Benniphx/website,alanljj/oca_website,AuyaJackie/website,nuobit/website,acsone/website,Benniphx/website,sergio-incaser/website,RoelAdriaans-B-informed/website,Yajo/website,nicolas-petit/website,initOS/website,Endika/website,open-synergy/website,gfcapalbo/website,sergio-incaser/website,JayVora-SerpentCS/website,initOS/website,seb-elico/website,Antiun/website,Antiun/website,StefanRijnhart/website,gfcapalbo/website,khaeusler/website,Endika/website,BT-ojossen/website,StefanRijnhart/website,JayVora-SerpentCS/website,hbrunn/website,incaser/website,xpansa/website,BT-fgarbely/website,Endika/website,pedrobaeza/website,Yajo/website,incaser/website,nuobit/website,Endika/website | ---
+++
@@ -21,7 +21,7 @@
{
"name": "Piwik analytics",
"version": "1.0",
- "author": "Therp BV",
+ "author": "Therp BV,Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Website",
"summary": "Track website users using piwik", |
2f5095c7020a1b6906981e75491007ce72ec5aac | antimarkdown/handlers.py | antimarkdown/handlers.py | # -*- coding: utf-8 -*-
"""antimarkdown.handlers -- Element handlers for converting HTML Elements/subtrees to Markdown text.
"""
from collections import deque
from antimarkdown import nodes
def render(*domtrees):
if not domtrees:
return u''
root = nodes.Root()
for dom in domtrees:
build_render_tree(root, dom)
lines = unicode(root).rstrip().splitlines()
# Strip leading empty lines
while not lines[0].strip():
lines.pop(0)
return nodes.normalize(u'\n'.join(lines))
def build_render_tree(root, domtree):
"""Process an ElementTree domtree and build a render tree.
"""
opened = set()
stack = deque([domtree])
blackboard = {}
render_tree = root
current_node = render_tree
while stack:
domtree = stack.pop()
if domtree not in opened:
# Open the domtree
# Build the render node.
node_class = getattr(nodes, domtree.tag.upper(), nodes.Node)
current_node = node_class(current_node, domtree, blackboard)
stack.append(domtree)
# Queue children
for el in reversed(domtree):
stack.append(el)
opened.add(domtree)
else:
# Close the domtree
current_node = current_node.parent
return root
| # -*- coding: utf-8 -*-
"""antimarkdown.handlers -- Element handlers for converting HTML Elements/subtrees to Markdown text.
"""
from collections import deque
from antimarkdown import nodes
def render(*domtrees):
if not domtrees:
return u''
root = nodes.Root()
for dom in domtrees:
build_render_tree(root, dom)
lines = unicode(root).rstrip().splitlines()
# Strip leading empty lines
while lines and not lines[0].strip():
lines.pop(0)
return nodes.normalize(u'\n'.join(lines))
def build_render_tree(root, domtree):
"""Process an ElementTree domtree and build a render tree.
"""
opened = set()
stack = deque([domtree])
blackboard = {}
render_tree = root
current_node = render_tree
while stack:
domtree = stack.pop()
if domtree not in opened:
# Open the domtree
# Build the render node.
node_class = getattr(nodes, domtree.tag.upper(), nodes.Node)
current_node = node_class(current_node, domtree, blackboard)
stack.append(domtree)
# Queue children
for el in reversed(domtree):
stack.append(el)
opened.add(domtree)
else:
# Close the domtree
current_node = current_node.parent
return root
| Handle case with empty block | Handle case with empty block
| Python | mit | Crossway/antimarkdown,Crossway/antimarkdown | ---
+++
@@ -16,7 +16,7 @@
lines = unicode(root).rstrip().splitlines()
# Strip leading empty lines
- while not lines[0].strip():
+ while lines and not lines[0].strip():
lines.pop(0)
return nodes.normalize(u'\n'.join(lines)) |
a6955a9bc3471b74f801bb8b64c6e816dda7551a | hokusai/lib/command.py | hokusai/lib/command.py | import os
import sys
import traceback
from functools import wraps
from hokusai.lib.common import print_red, get_verbosity
from hokusai.lib.exceptions import CalledProcessError, HokusaiError
from hokusai.lib.config import config
def command(config_check=True):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
if config_check:
config.check()
result = func(*args, **kwargs)
if result is None:
sys.exit(0)
else:
sys.exit(result)
except HokusaiError as e:
print_red(e.message)
sys.exit(e.return_code)
except SystemExit:
raise
except KeyboardInterrupt:
raise
except (CalledProcessError, Exception) as e:
if get_verbosity() or os.environ.get('DEBUG'):
print_red(traceback.format_exc(e))
else:
print_red("ERROR: %s" % str(e))
sys.exit(1)
return wrapper
return decorator
| import os
import sys
import traceback
from functools import wraps
from hokusai.lib.common import print_red, get_verbosity
from hokusai.lib.exceptions import CalledProcessError, HokusaiError
from hokusai.lib.config import config
def command(config_check=True):
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
if config_check:
config.check()
result = func(*args, **kwargs)
if result is None:
sys.exit(0)
else:
sys.exit(result)
except HokusaiError as e:
print_red(e.message)
sys.exit(e.return_code)
except SystemExit:
raise
except KeyboardInterrupt:
raise
except (CalledProcessError, Exception) as e:
if get_verbosity() or os.environ.get('DEBUG'):
print_red(traceback.format_exc())
else:
print_red("ERROR: %s" % str(e))
if hasattr(e, 'output'):
print(e.output.decode('utf-8'))
elif hasattr(e, 'message'):
print(e.message.decode('utf-8'))
sys.exit(1)
return wrapper
return decorator
| Fix traceback and exception handling | Fix traceback and exception handling
| Python | mit | izakp/hokusai | ---
+++
@@ -28,9 +28,13 @@
raise
except (CalledProcessError, Exception) as e:
if get_verbosity() or os.environ.get('DEBUG'):
- print_red(traceback.format_exc(e))
+ print_red(traceback.format_exc())
else:
print_red("ERROR: %s" % str(e))
+ if hasattr(e, 'output'):
+ print(e.output.decode('utf-8'))
+ elif hasattr(e, 'message'):
+ print(e.message.decode('utf-8'))
sys.exit(1)
return wrapper
return decorator |
3d943c113c244f4b285c2493004de48b754fd8d8 | bin/bson2json.py | bin/bson2json.py | #!/usr/bin/env python
"""
Simple utility to display BSON files.
"""
import sys
import errno
from bson.json_util import dumps
import bson_lazy
usage = '''
Usage: %s FILE... [OPTIONS]
Options:
--pretty Pretty print JSON
--help Print this help message
'''.strip() % sys.argv[0]
def main():
args = sys.argv[1:]
kwargs = {}
if '--pretty' in args:
args.remove('--pretty')
kwargs = {'sort_keys': True, 'indent': 4, 'separators': (',',':')}
if len(args) == 0 or '--help' in args:
print >>sys.stderr, usage
sys.exit()
for path in args:
try:
with open(path, 'rb') as f:
for doc in bson_lazy.load(f):
print dumps(doc, **kwargs)
except IOError, e:
if e.errno != errno.EPIPE:
print >>sys.stderr, 'ERROR: %s' % e
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
Simple utility to display BSON files.
"""
import sys
import errno
from bson.json_util import dumps
import bson_lazy
usage = '''
Usage: %s FILE... [OPTIONS]
Options:
--pretty Pretty print JSON
--help Print this help message
'''.strip() % sys.argv[0]
def main():
args = sys.argv[1:]
kwargs = {}
if '--pretty' in args:
args.remove('--pretty')
kwargs = {'sort_keys': True, 'indent': 4, 'separators': (',',':')}
if len(args) == 0 or '--help' in args:
print >>sys.stderr, usage
sys.exit()
for path in args:
try:
with open(path, 'rb') as f:
for doc in bson_lazy.load(f):
print dumps(doc, **kwargs)
except IOError, e:
if e.errno != errno.EPIPE:
print >>sys.stderr, 'ERROR: %s' % e
except KeyboardInterrupt:
return
if __name__ == '__main__':
main()
| Hide stacktrace for keyboard interrupts | Hide stacktrace for keyboard interrupts
| Python | mit | ShinNoNoir/bson_lazy | ---
+++
@@ -36,6 +36,9 @@
except IOError, e:
if e.errno != errno.EPIPE:
print >>sys.stderr, 'ERROR: %s' % e
+
+ except KeyboardInterrupt:
+ return
if __name__ == '__main__': |
b191a78d847167616dc38756c9fb450e5eb95c70 | utils/database.py | utils/database.py | import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [__import__("time").time(), ""]
}
failed = False
try:
user = self[channel][nick]
except KeyError:
failed = True
self[channel][nick] = temp
if not failed:
del temp['seen']
user.update(temp)
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
| import json
class Database(dict):
"""Holds a dict that contains all the information about the users in a channel"""
def __init__(self, irc):
super(Database, self).__init__(json.load(open("userdb.json")))
self.irc = irc
def remove_entry(self, event, nick):
try:
del self[event.target][nick]
except KeyError:
for i in self[event.target].values():
if i['host'] == event.source.host:
del self[event.target][i['hostmask'].split("!")[0]]
break
def add_entry(self, channel, nick, hostmask, account):
temp = {
'hostmask': hostmask,
'host': hostmask.split("@")[1],
'account': account,
'seen': [__import__("time").time(), ""]
}
failed = False
try:
user = self[channel][nick]
except KeyError:
failed = True
self[channel][nick] = temp
if not failed:
del temp['seen']
user.update(temp)
def get_user_host(self, channel, nick):
try:
host = "*!*@" + self[channel][nick]['host']
except KeyError:
self.irc.send("WHO {0} nuhs%nhuac".format(channel))
host = "*!*@" + self[channel][nick]['host']
return host
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
f.write("\n")
| Add new-line at EOF, when dumping userdb | Add new-line at EOF, when dumping userdb
| Python | mit | wolfy1339/Python-IRC-Bot | ---
+++
@@ -45,3 +45,4 @@
def flush(self):
with open('userdb.json', 'w') as f:
json.dump(self, f, indent=2, separators=(',', ': '))
+ f.write("\n") |
ff716f1cf4b523e37e667b29693757a3f32b1cd5 | djangae/urls.py | djangae/urls.py | from django.conf.urls import patterns, url, include
import djangae.contrib.mappers.urls
urlpatterns = patterns('djangae.views',
url(r'^start$', 'start'),
url(r'^start$', 'stop'),
url(r'^warmup$', 'warmup'),
url(r'^queue/deferred/?$', 'deferred'),
url(r'^internalupload/$', 'internalupload', name='djangae_internal_upload_handler'),
url(r'^mapreduce/', include(djangae.contrib.mappers.urls))
)
| from django.conf.urls import patterns, url, include
import djangae.contrib.mappers.urls
urlpatterns = patterns('djangae.views',
url(r'^start$', 'start'),
url(r'^stop$', 'stop'),
url(r'^warmup$', 'warmup'),
url(r'^queue/deferred/?$', 'deferred'),
url(r'^internalupload/$', 'internalupload', name='djangae_internal_upload_handler'),
url(r'^mapreduce/', include(djangae.contrib.mappers.urls))
)
| Fix url typo for stopping mapreduce task | Fix url typo for stopping mapreduce task
| Python | bsd-3-clause | grzes/djangae,kirberich/djangae,potatolondon/djangae,kirberich/djangae,kirberich/djangae,grzes/djangae,asendecka/djangae,asendecka/djangae,asendecka/djangae,potatolondon/djangae,grzes/djangae | ---
+++
@@ -3,7 +3,7 @@
urlpatterns = patterns('djangae.views',
url(r'^start$', 'start'),
- url(r'^start$', 'stop'),
+ url(r'^stop$', 'stop'),
url(r'^warmup$', 'warmup'),
url(r'^queue/deferred/?$', 'deferred'),
url(r'^internalupload/$', 'internalupload', name='djangae_internal_upload_handler'), |
a1e786bdba7dcf688211e0039313bf63e572cfe2 | example-flask-python3.6-index/app/main.py | example-flask-python3.6-index/app/main.py | from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
| from flask import Flask, send_file
app = Flask(__name__)
@app.route("/hello")
def hello():
return "Hello World from Flask in a uWSGI Nginx Docker container with \
Python 3.6 (from the example template)"
@app.route("/")
def main():
return send_file('./static/index.html')
# Everything not declared before (not a Flask route / API endpoint)...
@app.route('/<path:path>')
def route_frontend(path):
# ...could be a static file needed by the front end that
# doesn't use the `static` path (like in `<script src="bundle.js">`)
file_path = './static/' + path
if os.path.isfile(file_path):
return send_file(file_path)
# ...or should be handled by the SPA's "router" in front end
else:
return send_file('./static/index.html')
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80)
| Add static content and front end routes to SPA example | Add static content and front end routes to SPA example
| Python | apache-2.0 | tiangolo/uwsgi-nginx-flask-docker,tiangolo/uwsgi-nginx-flask-docker,tiangolo/uwsgi-nginx-flask-docker | ---
+++
@@ -10,5 +10,17 @@
def main():
return send_file('./static/index.html')
+# Everything not declared before (not a Flask route / API endpoint)...
+@app.route('/<path:path>')
+def route_frontend(path):
+ # ...could be a static file needed by the front end that
+ # doesn't use the `static` path (like in `<script src="bundle.js">`)
+ file_path = './static/' + path
+ if os.path.isfile(file_path):
+ return send_file(file_path)
+ # ...or should be handled by the SPA's "router" in front end
+ else:
+ return send_file('./static/index.html')
+
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True, port=80) |
cb20f8d81832aafb2ec6f70f135b0a027b14dc91 | processors/fix_changeline_budget_titles.py | processors/fix_changeline_budget_titles.py | import json
import logging
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
key = "%(year)s/%(budget_code)s" % line
title = budgets.get(key)
if title != None:
if title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
raise Exception()
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
| import json
import logging
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
key = "%(year)s/%(budget_code)s" % line
title = budgets.get(key)
if title != None:
if title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
| Fix bug in changeling title fix - it used to remove some lines on the way... | Fix bug in changeling title fix - it used to remove some lines on the way...
| Python | mit | omerbartal/open-budget-data,omerbartal/open-budget-data,OpenBudget/open-budget-data,OpenBudget/open-budget-data | ---
+++
@@ -32,6 +32,5 @@
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
- raise Exception()
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num |
2d616924f7dc02458bf0b13a396f3f91b039d321 | hub/views.py | hub/views.py | from rest_framework.decorators import api_view
from rest_framework.response import Response
from .models import FormBuilderPreference
from django.http import HttpResponseRedirect
from django.core.management import call_command
@api_view(['GET'])
def switch_builder(request):
'''
very un-restful, but for ease of testing, a quick 'GET' is hard to beat
'''
if not request.user.is_authenticated():
raise exceptions.NotAuthenticated()
if 'beta' in request.GET:
beta_val = request.GET.get('beta') == '1'
(pref, created) = FormBuilderPreference.objects.get_or_create(
user=request.user)
pref.preferred_builder = FormBuilderPreference.KPI if beta_val \
else FormBuilderPreference.DKOBO
pref.save()
if 'migrate' in request.GET:
call_command(
'import_survey_drafts_from_dkobo', username=request.user.username)
return HttpResponseRedirect('/')
| from .models import FormBuilderPreference
from django.http import HttpResponseRedirect
from django.core.management import call_command
from django.contrib.auth.decorators import login_required
@login_required
def switch_builder(request):
'''
very un-restful, but for ease of testing, a quick 'GET' is hard to beat
'''
if 'beta' in request.GET:
beta_val = request.GET.get('beta') == '1'
(pref, created) = FormBuilderPreference.objects.get_or_create(
user=request.user)
pref.preferred_builder = FormBuilderPreference.KPI if beta_val \
else FormBuilderPreference.DKOBO
pref.save()
if 'migrate' in request.GET:
call_command(
'import_survey_drafts_from_dkobo', username=request.user.username)
return HttpResponseRedirect('/')
| Use `login_required` decorator on `switch_builder` view | Use `login_required` decorator on `switch_builder` view
| Python | agpl-3.0 | kobotoolbox/kpi,onaio/kpi,kobotoolbox/kpi,onaio/kpi,onaio/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi | ---
+++
@@ -1,18 +1,14 @@
-from rest_framework.decorators import api_view
-from rest_framework.response import Response
from .models import FormBuilderPreference
from django.http import HttpResponseRedirect
from django.core.management import call_command
+from django.contrib.auth.decorators import login_required
-@api_view(['GET'])
+@login_required
def switch_builder(request):
'''
very un-restful, but for ease of testing, a quick 'GET' is hard to beat
'''
- if not request.user.is_authenticated():
- raise exceptions.NotAuthenticated()
-
if 'beta' in request.GET:
beta_val = request.GET.get('beta') == '1'
(pref, created) = FormBuilderPreference.objects.get_or_create( |
8f4d9c69f3f39fadda35329e22a378b6cdc1f829 | dipy/reconst/base.py | dipy/reconst/base.py | """
Base-classes for reconstruction models and reconstruction fits.
All the models in the reconst module follow the same template: a Model object
is used to represent the abstract properties of the model, that are independent
of the specifics of the data . These properties are reused whenver fitting a
particular set of data (different voxels, for example).
"""
class ReconstModel(object):
""" Abstract class for signal reconstruction models
"""
def __init__(self, gtab):
"""Initialization of the abstract class for signal reconstruction models
Parameters
----------
gtab : GradientTable class instance
"""
self.gtab=gtab
def fit(self, data, mask=None,**kwargs):
return ReconstFit(self, data)
class ReconstFit(object):
""" Abstract class which holds the fit result of ReconstModel
For example that could be holding FA or GFA etc.
"""
def __init__(self, model, data):
self.model = model
self.data = data
| """
Base-classes for reconstruction models and reconstruction fits.
All the models in the reconst module follow the same template: a Model object
is used to represent the abstract properties of the model, that are independent
of the specifics of the data . These properties are reused whenver fitting a
particular set of data (different voxels, for example).
"""
class ReconstModel(object):
""" Abstract class for signal reconstruction models
"""
def __init__(self, gtab):
"""Initialization of the abstract class for signal reconstruction models
Parameters
----------
gtab : GradientTable class instance
"""
self.gtab = gtab
def fit(self, data, mask=None, **kwargs):
return ReconstFit(self, data)
class ReconstFit(object):
""" Abstract class which holds the fit result of ReconstModel
For example that could be holding FA or GFA etc.
"""
def __init__(self, model, data):
self.model = model
self.data = data
| Update code in dipy/reconst (PEP8) | Update code in dipy/reconst (PEP8)
Using `pycodestyle` output, the file `dipy/reconst/base.py` was
updated to pass `pycodestyle` check
Signed-off-by: Antonio Ossa <1ecf3d2f96b6e61cf9b68f0fc294cab57dc5d597@uc.cl>
| Python | bsd-3-clause | FrancoisRheaultUS/dipy,nilgoyyou/dipy,nilgoyyou/dipy,FrancoisRheaultUS/dipy | ---
+++
@@ -14,6 +14,7 @@
class ReconstModel(object):
""" Abstract class for signal reconstruction models
"""
+
def __init__(self, gtab):
"""Initialization of the abstract class for signal reconstruction models
@@ -22,16 +23,18 @@
gtab : GradientTable class instance
"""
- self.gtab=gtab
+ self.gtab = gtab
- def fit(self, data, mask=None,**kwargs):
+ def fit(self, data, mask=None, **kwargs):
return ReconstFit(self, data)
+
class ReconstFit(object):
""" Abstract class which holds the fit result of ReconstModel
For example that could be holding FA or GFA etc.
"""
+
def __init__(self, model, data):
self.model = model
self.data = data |
b5e6952841d19e75b308fb2ab16ca5b098d376a9 | django-tutorial/tutorial/polls/views.py | django-tutorial/tutorial/polls/views.py | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, Http404
from polls.models import Question
# Create your views here.
def index(request):
latest_question_list = Question.objects.order_by('pub_date')[:5]
context = {'latest_question_list': latest_question_list}
return render(request, 'polls/index.html', context)
def detail(request, question_id):
question = get_object_or_404(Question, pk=question_id)
return render(request, 'polls/detail.html', {'question': question})
def results(request, question_id):
response = "You're looking at the results of question %s."
return HttpResponse(response % question_id)
def vote(request, question_id):
return HttpResponse("You're voting on question %s." % question_id)
| from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect, HttpResponse
from django.core.urlresolvers import reverse
from polls.models import Choice, Question
# Create your views here.
def index(request):
latest_question_list = Question.objects.order_by('pub_date')[:5]
context = {'latest_question_list': latest_question_list}
return render(request, 'polls/index.html', context)
def detail(request, question_id):
question = get_object_or_404(Question, pk=question_id)
return render(request, 'polls/detail.html', {'question': question})
def results(request, question_id):
response = "You're looking at the results of question %s."
return HttpResponse(response % question_id)
def vote(request, question_id):
p = get_object_or_404(Question, pk=question_id)
try:
selected_choice = p.choice_set.get(pk=request.POST['choice'])
except (KeyError, Choice.DoesNotExist):
# Redisplay the question voting form.
return render(request, 'polls/detail.html', {
'question': p,
'error_message': "You didn't select a choice.",
})
else:
selected_choice.votes += 1
selected_choice.save()
# Always return an HttpResponseRedirect after successfully dealing
# with POST data. This prevents data from being posted twice if a
# user hits the Back button.
return HttpResponseRedirect(reverse('polls:results', args=(p.id,))) | Update the view for the voting | Update the view for the voting
| Python | mit | domenicosolazzo/practice-django,domenicosolazzo/practice-django,domenicosolazzo/practice-django | ---
+++
@@ -1,6 +1,8 @@
from django.shortcuts import render, get_object_or_404
-from django.http import HttpResponse, Http404
-from polls.models import Question
+from django.http import HttpResponseRedirect, HttpResponse
+from django.core.urlresolvers import reverse
+
+from polls.models import Choice, Question
# Create your views here.
@@ -18,4 +20,19 @@
return HttpResponse(response % question_id)
def vote(request, question_id):
- return HttpResponse("You're voting on question %s." % question_id)
+ p = get_object_or_404(Question, pk=question_id)
+ try:
+ selected_choice = p.choice_set.get(pk=request.POST['choice'])
+ except (KeyError, Choice.DoesNotExist):
+ # Redisplay the question voting form.
+ return render(request, 'polls/detail.html', {
+ 'question': p,
+ 'error_message': "You didn't select a choice.",
+ })
+ else:
+ selected_choice.votes += 1
+ selected_choice.save()
+ # Always return an HttpResponseRedirect after successfully dealing
+ # with POST data. This prevents data from being posted twice if a
+ # user hits the Back button.
+ return HttpResponseRedirect(reverse('polls:results', args=(p.id,))) |
2fc45a6a0e2ba1efe06b4282234cf13c0ccd5b7d | dj_experiment/conf.py | dj_experiment/conf.py | from appconf import AppConf
from django.conf import settings
class DjExperimentAppConf(AppConf):
DATA_DIR = "./"
SEPARATOR = "."
OUTPUT_PREFIX = ""
OUTPUT_SUFFIX = ".nc"
CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'
CELERY_RESULT_BACKEND = 'rpc://'
class Meta:
prefix = 'dj_experiment'
holder = 'dj_experiment.conf.settings'
| import os
from appconf import AppConf
from django.conf import settings
class DjExperimentAppConf(AppConf):
DATA_DIR = "./"
BASE_DATA_DIR = os.path.join(settings.BASE_DIR, 'data')
SEPARATOR = "."
OUTPUT_PREFIX = ""
OUTPUT_SUFFIX = ".nc"
CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'
CELERY_RESULT_BACKEND = 'rpc://'
class Meta:
prefix = 'dj_experiment'
holder = 'dj_experiment.conf.settings'
| Add default base data dir for experiments | Add default base data dir for experiments
| Python | mit | francbartoli/dj-experiment,francbartoli/dj-experiment | ---
+++
@@ -1,9 +1,12 @@
+import os
+
from appconf import AppConf
from django.conf import settings
class DjExperimentAppConf(AppConf):
DATA_DIR = "./"
+ BASE_DATA_DIR = os.path.join(settings.BASE_DIR, 'data')
SEPARATOR = "."
OUTPUT_PREFIX = ""
OUTPUT_SUFFIX = ".nc" |
dbbbb844f80e73f5743bd2d213e21e44937c1870 | scripts/set_headers.py | scripts/set_headers.py | import requests
from mitmproxy import ctx
# TODO authenticate user
def getUser(flow):
if 'proxyauth' in flow.metadata: # TODO and user != None
return flow.metadata['proxyauth'][0]
else:
return ""
def request(flow):
payload = { 'username': getUser(flow), 'url': flow.request.pretty_host }
r = requests.get("http://localhost:3000/idea", params=payload)
flow.request.headers["x-idea-id"] = r.text
| import requests
from mitmproxy import http
from mitmproxy import ctx
# TODO authenticate user
def getProxyauth(flow):
if 'proxyauth' in flow.metadata: # TODO and user != None
return flow.metadata['proxyauth']
else:
return ""
def request(flow):
payload = { 'username': getProxyauth(flow)[0], 'password': getProxyauth(flow)[1], 'url': flow.request.pretty_host }
r = requests.get("http://localhost:3000/idea", params=payload)
# TODO check if auth'd
if r.status_code == 401:
flow.response = http.HTTPResponse.make(
401, # (optional) status code
b"You need to authenticate properly, buddy", # (optional) content
{"Content-Type": "text/html"} # (optional) headers
)
return
flow.request.headers["x-idea-id"] = r.text
| Add unauthenticated message if sign-in incorrect | Add unauthenticated message if sign-in incorrect
| Python | mit | laurmurclar/mitmproxy,laurmurclar/mitmproxy,laurmurclar/mitmproxy,laurmurclar/mitmproxy | ---
+++
@@ -1,14 +1,23 @@
import requests
+from mitmproxy import http
from mitmproxy import ctx
# TODO authenticate user
-def getUser(flow):
- if 'proxyauth' in flow.metadata: # TODO and user != None
- return flow.metadata['proxyauth'][0]
- else:
- return ""
+def getProxyauth(flow):
+ if 'proxyauth' in flow.metadata: # TODO and user != None
+ return flow.metadata['proxyauth']
+ else:
+ return ""
def request(flow):
- payload = { 'username': getUser(flow), 'url': flow.request.pretty_host }
+ payload = { 'username': getProxyauth(flow)[0], 'password': getProxyauth(flow)[1], 'url': flow.request.pretty_host }
r = requests.get("http://localhost:3000/idea", params=payload)
+ # TODO check if auth'd
+ if r.status_code == 401:
+ flow.response = http.HTTPResponse.make(
+ 401, # (optional) status code
+ b"You need to authenticate properly, buddy", # (optional) content
+ {"Content-Type": "text/html"} # (optional) headers
+ )
+ return
flow.request.headers["x-idea-id"] = r.text |
8cf268441e82da0348020bee344d8728a9f41ea8 | lib/ansible/runner/filter_plugins/core.py | lib/ansible/runner/filter_plugins/core.py | # (c) 2012, Jeroen Hoekx <jeroen@hoekx.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import json
import yaml
class FilterModule(object):
''' Ansible core jinja2 filters '''
def filters(self):
return {
'to_json': json.dumps,
'from_json': json.loads,
'to_yaml': yaml.dump,
'from_yaml': yaml.load,
}
| # (c) 2012, Jeroen Hoekx <jeroen@hoekx.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import json
import yaml
class FilterModule(object):
''' Ansible core jinja2 filters '''
def filters(self):
return {
'to_json': json.dumps,
'from_json': json.loads,
'to_yaml': yaml.safe_dump,
'from_yaml': yaml.load,
}
| Use yaml.safe_dump rather than yaml.dump. No more "!!python/unicode". | Use yaml.safe_dump rather than yaml.dump. No more "!!python/unicode".
| Python | mit | thaim/ansible,thaim/ansible | ---
+++
@@ -25,7 +25,7 @@
return {
'to_json': json.dumps,
'from_json': json.loads,
- 'to_yaml': yaml.dump,
+ 'to_yaml': yaml.safe_dump,
'from_yaml': yaml.load,
}
|
9473dec555f59a8f70bfd70bd6568bbcb8827f78 | glaive/glaive.py | glaive/glaive.py | import sklearn.cross_validation as cv
from sklearn import tree
import pandas as py
import numpy as np
| import sklearn.cross_validation as cv
from sklearn import tree
import pandas as pd
import numpy as np
import os.path
import pprint
def readData(maxReplayCount = 749):
"""reads the data from .arff files (Weka Attribute-Relation files)
maxReplayCount is <= 749
returns:
data - a list of dataframes containing replay build orders of the first 100 actions, indexed as ['frame', 'action', 'actionNumber']
"""
filename = "data/pvt_{}_lifetimes.arff"
range = np.arange(1, maxReplayCount)
data = []
buildList = set([])
for replay in range:
replayName = filename.format(str(replay).zfill(3))
if (os.path.isfile(replayName)):
with open(replayName, "r") as file:
#read, strip, and remove unnecessary data
lines = map(lambda x: x.rstrip().split(",")[:2], file.readlines()[11:111])
#make and add a dataframe
df = pd.DataFrame(lines, columns=("frame", "action"))
df["action"] = df["action"].map(lambda x: str(x).replace("Protoss ", "").replace(" ", "_"))
for action in list(df["action"].unique()):
buildList.add(action)
data.append(df)
# print(data)
# print(type(data))
# buildList = map(lambda x: list(x["action"].unique()), data)
buildList = list(buildList)
for df in data:
df["actionNumber"] = df["action"].map(lambda x: buildList.index(x))
return data
def createFunctions():
"""docstring for createFunctions"""
pass
data = readData()
print(data) | Add parsing to enable vectors | Add parsing to enable vectors
| Python | mit | peixian/Ultralisk,peixian/Ultralisk,peixian/Ultralisk | ---
+++
@@ -1,5 +1,48 @@
import sklearn.cross_validation as cv
from sklearn import tree
-import pandas as py
+import pandas as pd
import numpy as np
+import os.path
+import pprint
+def readData(maxReplayCount = 749):
+ """reads the data from .arff files (Weka Attribute-Relation files)
+
+ maxReplayCount is <= 749
+
+ returns:
+ data - a list of dataframes containing replay build orders of the first 100 actions, indexed as ['frame', 'action', 'actionNumber']
+ """
+ filename = "data/pvt_{}_lifetimes.arff"
+ range = np.arange(1, maxReplayCount)
+
+ data = []
+ buildList = set([])
+ for replay in range:
+ replayName = filename.format(str(replay).zfill(3))
+ if (os.path.isfile(replayName)):
+ with open(replayName, "r") as file:
+ #read, strip, and remove unnecessary data
+ lines = map(lambda x: x.rstrip().split(",")[:2], file.readlines()[11:111])
+ #make and add a dataframe
+ df = pd.DataFrame(lines, columns=("frame", "action"))
+ df["action"] = df["action"].map(lambda x: str(x).replace("Protoss ", "").replace(" ", "_"))
+ for action in list(df["action"].unique()):
+ buildList.add(action)
+ data.append(df)
+
+ # print(data)
+ # print(type(data))
+ # buildList = map(lambda x: list(x["action"].unique()), data)
+ buildList = list(buildList)
+ for df in data:
+ df["actionNumber"] = df["action"].map(lambda x: buildList.index(x))
+ return data
+
+
+def createFunctions():
+ """docstring for createFunctions"""
+ pass
+
+data = readData()
+print(data) |
ceea28b5f07d43644bbefacb39bd1f2b40297e36 | xero/constants.py | xero/constants.py | # Public/Private
XERO_BASE_URL = "https://api.xero.com"
REQUEST_TOKEN_URL = "%s/oauth/RequestToken" % XERO_BASE_URL
AUTHORIZE_URL = "%s/oauth/Authorize" % XERO_BASE_URL
ACCESS_TOKEN_URL = "%s/oauth/AccessToken" % XERO_BASE_URL
XERO_API_URL = "%s/api.xro/2.0" % XERO_BASE_URL
# Partner
PARTNER_XERO_BASE_URL = "https://api-partner.network.xero.com"
PARTNER_REQUEST_TOKEN_URL = "%s/oauth/RequestToken" % PARTNER_XERO_BASE_URL
PARTNER_AUTHORIZE_URL = AUTHORIZE_URL
PARTNER_ACCESS_TOKEN_URL = "%s/oauth/AccessToken" % PARTNER_XERO_BASE_URL
PARTNER_XERO_API_URL = "%s/api.xro/2.0" % PARTNER_XERO_BASE_URL | # Public/Private
XERO_BASE_URL = "https://api.xero.com"
REQUEST_TOKEN_URL = "%s/oauth/RequestToken" % XERO_BASE_URL
AUTHORIZE_URL = "%s/oauth/Authorize" % XERO_BASE_URL
ACCESS_TOKEN_URL = "%s/oauth/AccessToken" % XERO_BASE_URL
XERO_API_URL = "%s/api.xro/2.0" % XERO_BASE_URL
# Partner
PARTNER_XERO_BASE_URL = "https://api-partner.network.xero.com"
PARTNER_REQUEST_TOKEN_URL = "%s/oauth/RequestToken" % PARTNER_XERO_BASE_URL
PARTNER_AUTHORIZE_URL = "%s/oauth/Authorize" % PARTNER_XERO_BASE_URL
PARTNER_ACCESS_TOKEN_URL = "%s/oauth/AccessToken" % PARTNER_XERO_BASE_URL
PARTNER_XERO_API_URL = "%s/api.xro/2.0" % PARTNER_XERO_BASE_URL
| Update partner authorize URL to match changes by Xero | Update partner authorize URL to match changes by Xero | Python | bsd-3-clause | skillflip/pyxero | ---
+++
@@ -8,6 +8,6 @@
# Partner
PARTNER_XERO_BASE_URL = "https://api-partner.network.xero.com"
PARTNER_REQUEST_TOKEN_URL = "%s/oauth/RequestToken" % PARTNER_XERO_BASE_URL
-PARTNER_AUTHORIZE_URL = AUTHORIZE_URL
+PARTNER_AUTHORIZE_URL = "%s/oauth/Authorize" % PARTNER_XERO_BASE_URL
PARTNER_ACCESS_TOKEN_URL = "%s/oauth/AccessToken" % PARTNER_XERO_BASE_URL
PARTNER_XERO_API_URL = "%s/api.xro/2.0" % PARTNER_XERO_BASE_URL |
5965b991bade7c75b3be5db32db791bbc1cf04b7 | cupy/lib/stride_tricks.py | cupy/lib/stride_tricks.py | import cupy
def as_strided(x, shape=None, strides=None):
"""
Create a view into the array with the given shape and strides.
.. warning:: This function has to be used with extreme care, see notes.
Parameters
----------
x : ndarray
Array to create a new.
shape : sequence of int, optional
The shape of the new array. Defaults to ``x.shape``.
strides : sequence of int, optional
The strides of the new array. Defaults to ``x.strides``.
Returns
-------
view : ndarray
See also
--------
numpy.lib.stride_tricks.as_strided
reshape : reshape an array.
Notes
-----
``as_strided`` creates a view into the array given the exact strides
and shape. This means it manipulates the internal data structure of
ndarray and, if done incorrectly, the array elements can point to
invalid memory and can corrupt results or crash your program.
"""
shape = x.shape if shape is None else tuple(shape)
strides = x.strides if strides is None else tuple(strides)
return cupy.ndarray(shape=shape, dtype=x.dtype,
memptr=x.data, strides=strides)
| import cupy
def as_strided(x, shape=None, strides=None):
"""
Create a view into the array with the given shape and strides.
.. warning:: This function has to be used with extreme care, see notes.
Parameters
----------
x : ndarray
Array to create a new.
shape : sequence of int, optional
The shape of the new array. Defaults to ``x.shape``.
strides : sequence of int, optional
The strides of the new array. Defaults to ``x.strides``.
Returns
-------
view : ndarray
See also
--------
numpy.lib.stride_tricks.as_strided
reshape : reshape an array.
Notes
-----
``as_strided`` creates a view into the array given the exact strides
and shape. This means it manipulates the internal data structure of
ndarray and, if done incorrectly, the array elements can point to
invalid memory and can corrupt results or crash your program.
"""
shape = x.shape if shape is None else tuple(shape)
strides = x.strides if strides is None else tuple(strides)
return cupy.ndarray(shape=shape, dtype=x.dtype,
memptr=x.data, strides=strides)
| Fix document format of as_strided | Fix document format of as_strided
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | ---
+++
@@ -4,6 +4,7 @@
def as_strided(x, shape=None, strides=None):
"""
Create a view into the array with the given shape and strides.
+
.. warning:: This function has to be used with extreme care, see notes.
Parameters |
4c3f483ad0c3f119b9c3bb7b2b8b28680c48da3f | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.2.dev0'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.2'
| Update dsub version to 0.3.2 | Update dsub version to 0.3.2
PiperOrigin-RevId: 252927917
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | ---
+++
@@ -26,4 +26,4 @@
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
-DSUB_VERSION = '0.3.2.dev0'
+DSUB_VERSION = '0.3.2' |
3ebb80476553e8228c6709a3e6ff75ea42fff586 | properties/property.py | properties/property.py | """
Property class
"""
class Property(object):
"""
a monopoly property that items on the
board can inherit from
"""
def __init__(self):
pass
| """
Property class
"""
class Property(object):
"""
a monopoly property that items on the
board can inherit from
"""
def __init__(self, name, price, baseRent, rentWithHouses, mortgageValue, owner=None, houseCost, hotelCost):
self.name = name
self.owner = owner
self.price = price
self.baseRent = baseRent
self.rentWithHouses = rentWithHouses
self.mortgageValue = mortgageValue
self.houseCost = houseCost
self.hotelCost = hotelCost
self.houses = 0
self.hotels = 0
@property
def rent(self):
return
def purchase(self, player, cost):
player.balance -= cost
def purchaseProperty(self, player):
self.owner = player
purchase(player, self.cost)
def buyHotel(self, player):
self.hotels += 1
player.balance -= self.hotelCost
def buyHouse(self, player):
self.houses += 1
player.balance -= self.houseCost
| Add some basic transactional methdos to properties | Add some basic transactional methdos to properties
| Python | mit | markthethomas/monopoly | ---
+++
@@ -1,11 +1,43 @@
"""
Property class
"""
+
class Property(object):
"""
a monopoly property that items on the
board can inherit from
"""
- def __init__(self):
- pass
+
+ def __init__(self, name, price, baseRent, rentWithHouses, mortgageValue, owner=None, houseCost, hotelCost):
+ self.name = name
+ self.owner = owner
+ self.price = price
+ self.baseRent = baseRent
+ self.rentWithHouses = rentWithHouses
+ self.mortgageValue = mortgageValue
+
+ self.houseCost = houseCost
+ self.hotelCost = hotelCost
+
+ self.houses = 0
+ self.hotels = 0
+
+ @property
+ def rent(self):
+ return
+
+ def purchase(self, player, cost):
+ player.balance -= cost
+
+ def purchaseProperty(self, player):
+ self.owner = player
+ purchase(player, self.cost)
+
+ def buyHotel(self, player):
+ self.hotels += 1
+ player.balance -= self.hotelCost
+
+ def buyHouse(self, player):
+ self.houses += 1
+ player.balance -= self.houseCost |
90f2c22a9243855546c8689c5773be837e05aa47 | core/views.py | core/views.py | # -*- coding: utf-8 -*-
from django.shortcuts import render_to_response, get_object_or_404
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView
from django.views.generic.list import ListView
from django.template import RequestContext
from core.mixins import SubdomainContextMixin, PaginatorMixin
from core.models import Infopage
from core.context_processors import subdomains_context, categories_context
class RyndaCreateView(SubdomainContextMixin, CreateView):
pass
class RyndaDetailView(SubdomainContextMixin, DetailView):
pass
class RyndaListView(SubdomainContextMixin, PaginatorMixin, ListView ):
pass
def show_page(request, slug):
page = get_object_or_404(Infopage, slug=slug)
return render_to_response('infopage/show_page.html',
{'title': page.title, 'text': page.text, },
context_instance=RequestContext(request,
processors=[subdomains_context, categories_context])
)
| # -*- coding: utf-8 -*-
from django.shortcuts import render_to_response, get_object_or_404
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView
from django.views.generic.list import ListView
from django.template import RequestContext
from core.mixins import SubdomainContextMixin, PaginatorMixin
from core.models import Infopage
from core.context_processors import subdomains_context, categories_context
class RyndaCreateView(SubdomainContextMixin, CreateView):
pass
class RyndaDetailView(SubdomainContextMixin, DetailView):
pass
class RyndaListView(SubdomainContextMixin, PaginatorMixin, ListView ):
paginator_url = None
def get_paginator_url(self):
if self.paginator_url is None:
raise Exception(
"You MUST define paginator_url or overwrite get_paginator_url()")
return self.paginator_url
def get_context_data(self, **kwargs):
context = super(RyndaListView, self).get_context_data(**kwargs)
context['paginator_url'] = self.get_paginator_url()
sc = self.paginator(context['paginator'].num_pages, page=context['page_obj'].number)
context['paginator_line'] = sc
return context
def show_page(request, slug):
page = get_object_or_404(Infopage, slug=slug)
return render_to_response('infopage/show_page.html',
{'title': page.title, 'text': page.text, },
context_instance=RequestContext(request,
processors=[subdomains_context, categories_context])
)
| Move paginator settings to base list view | Move paginator settings to base list view
| Python | mit | sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/Rynda,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa | ---
+++
@@ -20,7 +20,20 @@
class RyndaListView(SubdomainContextMixin, PaginatorMixin, ListView ):
- pass
+ paginator_url = None
+
+ def get_paginator_url(self):
+ if self.paginator_url is None:
+ raise Exception(
+ "You MUST define paginator_url or overwrite get_paginator_url()")
+ return self.paginator_url
+
+ def get_context_data(self, **kwargs):
+ context = super(RyndaListView, self).get_context_data(**kwargs)
+ context['paginator_url'] = self.get_paginator_url()
+ sc = self.paginator(context['paginator'].num_pages, page=context['page_obj'].number)
+ context['paginator_line'] = sc
+ return context
def show_page(request, slug): |
7b75e27e36ec20793732b51693ecb57026fa75be | examples/testlogin.py | examples/testlogin.py | #!/usr/bin/python
from fortiosapi import FortiOSAPI
import sys
import os
import pprint
import json
import pexpect
import yaml
import logging
from packaging.version import Version
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
logger = logging.getLogger('fortiosapi')
hdlr = logging.FileHandler('testfortiosapi.log')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)
fgt = FortiOSAPI()
def main():
# Login to the FGT ip
fgt.debug('on')
fgthost = '192.168.122.71'
user = 'admin'
passwd = ''
resp = fgt.login(fgthost, user, passwd)
pp = pprint.PrettyPrinter(indent=4)
resp = fgt.license()
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(resp)
if __name__ == '__main__':
main()
| #!/usr/bin/python
import logging
import pprint
from fortiosapi import FortiOSAPI
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
logger = logging.getLogger('fortiosapi')
hdlr = logging.FileHandler('testfortiosapi.log')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)
fgt = FortiOSAPI()
def main():
# Login to the FGT ip
fgt.debug('on')
fgthost = '10.10.10.125'
user = 'admin'
passwd = 'toto'
resp = fgt.login(fgthost, user, passwd)
pp = pprint.PrettyPrinter(indent=4)
resp = fgt.license()
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(resp)
if __name__ == '__main__':
main()
| Create an example firewall rule push with the antivirus enablement. | Create an example firewall rule push with the antivirus enablement.
| Python | apache-2.0 | thomnico/fortiosapi,thomnico/fortiosapi,thomnico/fortigateconf | ---
+++
@@ -1,13 +1,9 @@
#!/usr/bin/python
+import logging
+import pprint
+
from fortiosapi import FortiOSAPI
-import sys
-import os
-import pprint
-import json
-import pexpect
-import yaml
-import logging
-from packaging.version import Version
+
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
logger = logging.getLogger('fortiosapi')
@@ -22,9 +18,9 @@
def main():
# Login to the FGT ip
fgt.debug('on')
- fgthost = '192.168.122.71'
+ fgthost = '10.10.10.125'
user = 'admin'
- passwd = ''
+ passwd = 'toto'
resp = fgt.login(fgthost, user, passwd)
pp = pprint.PrettyPrinter(indent=4)
resp = fgt.license() |
de70b1549f33484da87d6958d9f9714e7da50956 | git_upstream_diff.py | git_upstream_diff.py | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import sys
import subprocess2
from git_common import current_branch, get_or_create_merge_base, config_list
from git_common import GIT_EXE
def main(args):
default_args = config_list('depot-tools.upstream-diff.default-args')
args = default_args + args
parser = argparse.ArgumentParser()
parser.add_argument('--wordwise', action='store_true', default=False,
help=(
'Print a colorized wordwise diff '
'instead of line-wise diff'))
opts, extra_args = parser.parse_known_args(args)
cmd = [GIT_EXE, 'diff', '--patience', '-C', '-C']
if opts.wordwise:
cmd += ['--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])']
cmd += [get_or_create_merge_base(current_branch())]
cmd += extra_args
subprocess2.check_call(cmd)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import sys
import subprocess2
import git_common as git
def main(args):
default_args = git.config_list('depot-tools.upstream-diff.default-args')
args = default_args + args
parser = argparse.ArgumentParser()
parser.add_argument('--wordwise', action='store_true', default=False,
help=(
'Print a colorized wordwise diff '
'instead of line-wise diff'))
opts, extra_args = parser.parse_known_args(args)
cur = git.current_branch()
if not cur or cur == 'HEAD':
print 'fatal: Cannot perform git-upstream-diff while not on a branch'
return 1
par = git.upstream(cur)
if not par:
print 'fatal: No upstream configured for branch \'%s\'' % cur
return 1
cmd = [git.GIT_EXE, 'diff', '--patience', '-C', '-C']
if opts.wordwise:
cmd += ['--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])']
cmd += [git.get_or_create_merge_base(cur, par)]
cmd += extra_args
subprocess2.check_call(cmd)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| Make udiff print reasonable errors while not on a branch. | Make udiff print reasonable errors while not on a branch.
R=agable@chromium.org
BUG=
Review URL: https://codereview.chromium.org/212493002
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@259647 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | kromain/chromium-tools,cybertk/depot_tools,npe9/depot_tools,HackFisher/depot_tools,kromain/chromium-tools,duongbaoduy/gtools,Neozaru/depot_tools,eatbyte/depot_tools,kaiix/depot_tools,fracting/depot_tools,CoherentLabs/depot_tools,kromain/chromium-tools,Chilledheart/depot_tools,Chilledheart/depot_tools,kromain/chromium-tools,ajohnson23/depot_tools,kaiix/depot_tools,azunite/chrome_build,chinmaygarde/depot_tools,xuyuhan/depot_tools,Neozaru/depot_tools,smikes/depot_tools,hsharsha/depot_tools,liaorubei/depot_tools,SuYiling/chrome_depot_tools,airtimemedia/depot_tools,npe9/depot_tools,duongbaoduy/gtools,smikes/depot_tools,eatbyte/depot_tools,Phonebooth/depot_tools,Midrya/chromium,npe9/depot_tools,Chilledheart/depot_tools,Phonebooth/depot_tools,azureplus/chromium_depot_tools,fanjunwei/depot_tools,withtone/depot_tools,michalliu/chromium-depot_tools,disigma/depot_tools,cybertk/depot_tools,mlufei/depot_tools,Chilledheart/depot_tools,HackFisher/depot_tools,xuyuhan/depot_tools,ajohnson23/depot_tools,aleonliao/depot_tools,sarvex/depot-tools,disigma/depot_tools,cybertk/depot_tools,fanjunwei/depot_tools,aleonliao/depot_tools,npe9/depot_tools,yetu/repotools,G-P-S/depot_tools,Midrya/chromium,duanwujie/depot_tools,sarvex/depot-tools,chinmaygarde/depot_tools,mlufei/depot_tools,SuYiling/chrome_depot_tools,eatbyte/depot_tools,fracting/depot_tools,chinmaygarde/depot_tools,eatbyte/depot_tools,yetu/repotools,gcodetogit/depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,xuyuhan/depot_tools,primiano/depot_tools,Neozaru/depot_tools,azureplus/chromium_depot_tools,michalliu/chromium-depot_tools,liaorubei/depot_tools,SuYiling/chrome_depot_tools,CoherentLabs/depot_tools,Neozaru/depot_tools,withtone/depot_tools,Neozaru/depot_tools,fracting/depot_tools,gcodetogit/depot_tools,HackFisher/depot_tools,duanwujie/depot_tools,disigma/depot_tools,liaorubei/depot_tools,hsharsha/depot_tools,aleonliao/depot_tools,yetu/repotools,G-P-S/depot_tools,
smikes/depot_tools,azunite/chrome_build,Midrya/chromium,fanjunwei/depot_tools,gcodetogit/depot_tools,Chilledheart/depot_tools,airtimemedia/depot_tools,airtimemedia/depot_tools,cybertk/depot_tools,withtone/depot_tools,liaorubei/depot_tools,duongbaoduy/gtools,michalliu/chromium-depot_tools,duanwujie/depot_tools,primiano/depot_tools,hsharsha/depot_tools,airtimemedia/depot_tools,sarvex/depot-tools,G-P-S/depot_tools,smikes/depot_tools,Phonebooth/depot_tools,mlufei/depot_tools,primiano/depot_tools,HackFisher/depot_tools,fanjunwei/depot_tools,sarvex/depot-tools,azunite/chrome_build,michalliu/chromium-depot_tools,G-P-S/depot_tools,xuyuhan/depot_tools,kaiix/depot_tools,ajohnson23/depot_tools,Phonebooth/depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,azureplus/chromium_depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,cybertk/depot_tools,smikes/depot_tools | ---
+++
@@ -8,11 +8,10 @@
import subprocess2
-from git_common import current_branch, get_or_create_merge_base, config_list
-from git_common import GIT_EXE
+import git_common as git
def main(args):
- default_args = config_list('depot-tools.upstream-diff.default-args')
+ default_args = git.config_list('depot-tools.upstream-diff.default-args')
args = default_args + args
parser = argparse.ArgumentParser()
@@ -22,10 +21,20 @@
'instead of line-wise diff'))
opts, extra_args = parser.parse_known_args(args)
- cmd = [GIT_EXE, 'diff', '--patience', '-C', '-C']
+ cur = git.current_branch()
+ if not cur or cur == 'HEAD':
+ print 'fatal: Cannot perform git-upstream-diff while not on a branch'
+ return 1
+
+ par = git.upstream(cur)
+ if not par:
+ print 'fatal: No upstream configured for branch \'%s\'' % cur
+ return 1
+
+ cmd = [git.GIT_EXE, 'diff', '--patience', '-C', '-C']
if opts.wordwise:
cmd += ['--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])']
- cmd += [get_or_create_merge_base(current_branch())]
+ cmd += [git.get_or_create_merge_base(cur, par)]
cmd += extra_args
|
cd9bb30b68c491d2d8caf72d47e6b8c9f9a82a45 | responsive/__init__.py | responsive/__init__.py | "Utilities for building responsive websites in Django."
from __future__ import unicode_literals
__version__ = '0.2.0'
| "Utilities for building responsive websites in Django."
from __future__ import unicode_literals
__version__ = '0.3.0dev'
| Update development status on master. | Update development status on master.
| Python | bsd-2-clause | mlavin/django-responsive,mlavin/django-responsive,mlavin/django-responsive | ---
+++
@@ -2,4 +2,4 @@
from __future__ import unicode_literals
-__version__ = '0.2.0'
+__version__ = '0.3.0dev' |
3639576bf9e32f5cda952e78ebae5a75c9c476c0 | fedmsg.d/statscache.py | fedmsg.d/statscache.py | import socket
import datetime
hostname = socket.gethostname().split('.')[0]
config = {
# Consumer stuff
"statscache.consumer.enabled": True,
"statscache.sqlalchemy.uri": "sqlite:////var/tmp/statscache-dev-db.sqlite",
# stats models will go back at least this far (current value arbitrary)
"statscache.consumer.epoch": datetime.datetime(year=2015, month=6, day=1),
# stats models are updated at this frequency
"statscache.producer.frequency": datetime.timedelta(seconds=1),
# Turn on logging for statscache
"logging": dict(
loggers=dict(
statscache={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
statscache_plugins={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
| import socket
import datetime
hostname = socket.gethostname().split('.')[0]
config = {
# Consumer stuff
"statscache.consumer.enabled": True,
"statscache.sqlalchemy.uri": "sqlite:////var/tmp/statscache-dev-db.sqlite",
# stats models will go back at least this far (current value arbitrary)
"statscache.consumer.epoch": datetime.datetime(year=2015, month=8, day=8),
# stats models are updated at this frequency
"statscache.producer.frequency": datetime.timedelta(seconds=1),
# Turn on logging for statscache
"logging": dict(
loggers=dict(
statscache={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
statscache_plugins={
"level": "DEBUG",
"propagate": False,
"handlers": ["console"],
},
),
),
}
| Update statistics epoch for easier testing | Update statistics epoch for easier testing
| Python | lgpl-2.1 | yazman/statscache,yazman/statscache,yazman/statscache | ---
+++
@@ -8,7 +8,7 @@
"statscache.consumer.enabled": True,
"statscache.sqlalchemy.uri": "sqlite:////var/tmp/statscache-dev-db.sqlite",
# stats models will go back at least this far (current value arbitrary)
- "statscache.consumer.epoch": datetime.datetime(year=2015, month=6, day=1),
+ "statscache.consumer.epoch": datetime.datetime(year=2015, month=8, day=8),
# stats models are updated at this frequency
"statscache.producer.frequency": datetime.timedelta(seconds=1),
# Turn on logging for statscache |
430da89ec87a9ed862a4aa86e3de7672a07a313d | egpackager/datamanager.py | egpackager/datamanager.py | import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource
class DataManager(object):
def __init__(self, debug=False):
# Set up logging
if debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
self.logger = logging.getLogger(__name__)
self.logger.debug("Initializing new registry manager")
self._data = OrderedDict()
def add_gpsread_datasource(self, *args, **kwargs):
self.logger.debug('Adding Google Sheets data source')
self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
@property
def data(self):
return self._data
| import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource
class DataManager(object):
def __init__(self, debug=False):
# Set up logging
if debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
self.logger = logging.getLogger(__name__)
self.logger.debug("Initializing new registry manager")
self._data = OrderedDict()
def add_datasource(self, *args, **kwargs):
if 'type' not in kwargs:
raise TypeError("Missing require keyword argument: 'type")
if kwargs['type'] == 'gspread':
# Remove keyword argument 'type' as it us not needed anymore
del kwargs['type']
self.logger.debug('Adding Google Sheets data source')
self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
elif kwargs['type'] == 'raster':
pass
@property
def data(self):
return self._data
| Generalize adding a data source | Generalize adding a data source
Instead of using data source specific methods for DataManager, use
just one: add_datasource(). The type of data source is defined by
the keyword argument 'type'.
| Python | mit | VUEG/egpackager | ---
+++
@@ -17,9 +17,16 @@
self.logger.debug("Initializing new registry manager")
self._data = OrderedDict()
- def add_gpsread_datasource(self, *args, **kwargs):
- self.logger.debug('Adding Google Sheets data source')
- self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
+ def add_datasource(self, *args, **kwargs):
+ if 'type' not in kwargs:
+ raise TypeError("Missing require keyword argument: 'type")
+ if kwargs['type'] == 'gspread':
+ # Remove keyword argument 'type' as it us not needed anymore
+ del kwargs['type']
+ self.logger.debug('Adding Google Sheets data source')
+ self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
+ elif kwargs['type'] == 'raster':
+ pass
@property
def data(self): |
1d6670165dd74084813b38032cfddb6d33cd9d7a | xdc-plugin/tests/compare_output_json.py | xdc-plugin/tests/compare_output_json.py | #!/usr/bin/env python3
"""
This script extracts the top module cells and their corresponding parameters
from json files produced by Yosys.
The return code of this script is used to check if the output is equivalent.
"""
import sys
import json
parameters = ["IOSTANDARD", "DRIVE", "SLEW", "IN_TERM"]
def read_cells(json_file):
with open(json_file) as f:
data = json.load(f)
f.close()
cells = data['modules']['top']['cells']
cells_parameters = dict()
for cell, opts in cells.items():
attributes = opts['parameters']
if len(attributes.keys()):
if any([x in parameters for x in attributes.keys()]):
cells_parameters[cell] = attributes
return cells_parameters
def main():
if len(sys.argv) < 3:
print("Incorrect number of arguments")
exit(1)
cells1 = read_cells(sys.argv[1])
cells2 = read_cells(sys.argv[2])
if cells1 == cells2:
exit(0)
else:
print(json.dumps(cells1, indent=4))
print("VS")
print(json.dumps(cells2, indent=4))
exit(1)
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
"""
This script extracts the top module cells and their corresponding parameters
from json files produced by Yosys.
The return code of this script is used to check if the output is equivalent.
"""
import sys
import json
import argparse
parameters = ["IOSTANDARD", "DRIVE", "SLEW", "IN_TERM"]
def read_cells(json_file):
with open(json_file) as f:
data = json.load(f)
f.close()
cells = data['modules']['top']['cells']
cells_parameters = dict()
for cell, opts in cells.items():
attributes = opts['parameters']
if len(attributes.keys()):
if any([x in parameters for x in attributes.keys()]):
cells_parameters[cell] = attributes
return cells_parameters
def main(args):
cells = read_cells(args.json)
if args.update:
with open(args.golden, 'w') as f:
json.dump(cells, f)
else:
with open(args.golden) as f:
cells_golden = json.load(f)
if cells == cells_golden:
exit(0)
else:
print(json.dumps(cells, indent=4))
print("VS")
print(json.dumps(cells_golden, indent=4))
exit(1)
f.close()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--json', help = 'JSON to compare', required = True)
parser.add_argument('--golden', help = 'Golden JSON file', required = True)
parser.add_argument('--update', action = 'store_true', help = 'Update golden reference')
args = parser.parse_args()
main(args)
| Refactor test output comparison script | XDC: Refactor test output comparison script
Signed-off-by: Tomasz Michalak <a2fdaa543b4cc5e3d6cd8672ec412c0eb393b86e@antmicro.com>
| Python | apache-2.0 | SymbiFlow/yosys-f4pga-plugins,SymbiFlow/yosys-f4pga-plugins,chipsalliance/yosys-f4pga-plugins,SymbiFlow/yosys-f4pga-plugins,chipsalliance/yosys-f4pga-plugins,SymbiFlow/yosys-symbiflow-plugins,antmicro/yosys-symbiflow-plugins,antmicro/yosys-symbiflow-plugins,SymbiFlow/yosys-symbiflow-plugins,antmicro/yosys-symbiflow-plugins,SymbiFlow/yosys-symbiflow-plugins | ---
+++
@@ -8,6 +8,7 @@
import sys
import json
+import argparse
parameters = ["IOSTANDARD", "DRIVE", "SLEW", "IN_TERM"]
@@ -25,19 +26,27 @@
return cells_parameters
-def main():
- if len(sys.argv) < 3:
- print("Incorrect number of arguments")
- exit(1)
- cells1 = read_cells(sys.argv[1])
- cells2 = read_cells(sys.argv[2])
- if cells1 == cells2:
- exit(0)
+def main(args):
+ cells = read_cells(args.json)
+ if args.update:
+ with open(args.golden, 'w') as f:
+ json.dump(cells, f)
else:
- print(json.dumps(cells1, indent=4))
- print("VS")
- print(json.dumps(cells2, indent=4))
- exit(1)
+ with open(args.golden) as f:
+ cells_golden = json.load(f)
+ if cells == cells_golden:
+ exit(0)
+ else:
+ print(json.dumps(cells, indent=4))
+ print("VS")
+ print(json.dumps(cells_golden, indent=4))
+ exit(1)
+ f.close()
if __name__ == "__main__":
- main()
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--json', help = 'JSON to compare', required = True)
+ parser.add_argument('--golden', help = 'Golden JSON file', required = True)
+ parser.add_argument('--update', action = 'store_true', help = 'Update golden reference')
+ args = parser.parse_args()
+ main(args) |
c58322ba39708ac7bb6d48eacbb5e1011c676b8f | bluebottle/utils/monkey_patch_parler.py | bluebottle/utils/monkey_patch_parler.py | import parler.appsettings
from parler.utils.conf import add_default_language_settings
from bluebottle.clients import properties
appsettings = parler.appsettings
class TenantAwareParlerAppsettings(object):
@property
def PARLER_DEFAULT_LANGUAGE_CODE(self):
return properties.LANGUAGE_CODE
@property
def PARLER_LANGUAGES(self):
return add_default_language_settings({
1: [{'code': lang[0]} for lang in properties.LANGUAGES],
'default': {
'fallbacks': [properties.LANGUAGE_CODE],
'hide_untranslated': False
}
})
def __getattr__(self, attr):
return getattr(appsettings, attr)
parler.appsettings = TenantAwareParlerAppsettings()
| import parler.appsettings
from parler.utils.conf import add_default_language_settings
from bluebottle.clients import properties
appsettings = parler.appsettings
class TenantAwareParlerAppsettings(object):
@property
def PARLER_DEFAULT_LANGUAGE_CODE(self):
return properties.LANGUAGE_CODE
@property
def PARLER_LANGUAGES(self):
return add_default_language_settings({
1: [{'code': lang[0]} for lang in properties.LANGUAGES],
'default': {
'fallbacks': [lang[0] for lang in properties.LANGUAGES],
'hide_untranslated': False
}
})
def __getattr__(self, attr):
return getattr(appsettings, attr)
parler.appsettings = TenantAwareParlerAppsettings()
| Add all languages to fallback languages | Add all languages to fallback languages
BB-12799 #resolve
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | ---
+++
@@ -16,7 +16,7 @@
return add_default_language_settings({
1: [{'code': lang[0]} for lang in properties.LANGUAGES],
'default': {
- 'fallbacks': [properties.LANGUAGE_CODE],
+ 'fallbacks': [lang[0] for lang in properties.LANGUAGES],
'hide_untranslated': False
}
}) |
2367a5454660ffec6b4bab11939edc52197be791 | guv/__init__.py | guv/__init__.py | version_info = (0, 20, 0)
__version__ = '.'.join(map(str, version_info))
try:
from . import greenthread
from . import greenpool
from . import queue
from . import timeout
from . import patcher
from . import server
from .hubs.trampoline import gyield
import greenlet
import pyuv_cffi # only to compile the shared library before monkey-patching
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
serve = server.serve
listen = server.listen
connect = server.connect
StopServe = server.StopServe
wrap_ssl = server.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
except ImportError as e:
# This is to make Debian packaging easier, it ignores import errors of greenlet so that the
# packager can still at least access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
| version_info = (0, 21, 0)
__version__ = '.'.join(map(str, version_info))
try:
from . import greenpool
from . import queue
from .hubs.trampoline import gyield
from .greenthread import sleep, spawn, spawn_n, spawn_after, kill
from .greenpool import GreenPool, GreenPile
from .timeout import Timeout, with_timeout
from .patcher import import_patched, monkey_patch
from .server import serve, listen, connect, StopServe, wrap_ssl
import pyuv_cffi # only to compile the shared library before monkey-patching
except ImportError as e:
# This is to make Debian packaging easier, it ignores import errors of greenlet so that the
# packager can still at least access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
| Reorganize imports (and bump version) | Reorganize imports (and bump version)
| Python | mit | veegee/guv,veegee/guv | ---
+++
@@ -1,41 +1,18 @@
-version_info = (0, 20, 0)
+version_info = (0, 21, 0)
__version__ = '.'.join(map(str, version_info))
try:
- from . import greenthread
from . import greenpool
from . import queue
- from . import timeout
- from . import patcher
- from . import server
from .hubs.trampoline import gyield
- import greenlet
+ from .greenthread import sleep, spawn, spawn_n, spawn_after, kill
+ from .greenpool import GreenPool, GreenPile
+ from .timeout import Timeout, with_timeout
+ from .patcher import import_patched, monkey_patch
+ from .server import serve, listen, connect, StopServe, wrap_ssl
+
import pyuv_cffi # only to compile the shared library before monkey-patching
- sleep = greenthread.sleep
- spawn = greenthread.spawn
- spawn_n = greenthread.spawn_n
- spawn_after = greenthread.spawn_after
- kill = greenthread.kill
-
- Timeout = timeout.Timeout
- with_timeout = timeout.with_timeout
-
- GreenPool = greenpool.GreenPool
- GreenPile = greenpool.GreenPile
-
- Queue = queue.Queue
-
- import_patched = patcher.import_patched
- monkey_patch = patcher.monkey_patch
-
- serve = server.serve
- listen = server.listen
- connect = server.connect
- StopServe = server.StopServe
- wrap_ssl = server.wrap_ssl
-
- getcurrent = greenlet.greenlet.getcurrent
except ImportError as e:
# This is to make Debian packaging easier, it ignores import errors of greenlet so that the
# packager can still at least access the version. Also this makes easy_install a little quieter |
26f08f0f3275c352351f27a36674b65c6e6d171f | messaging.py | messaging.py | import sms_generator
import sms_twilio
import db
import config
def broadcast_procedure(procedure, location, duration, doctor):
message = sms_generator.new_procedure_message(procedure, location, duration, doctor)
recipients = db.get_all_students()
print(recipients)
for recipient in recipients:
print("Sending SMS")
print(recipient)
sms_twilio.send_sms(recipient['phone_number'], config.twilio_number, message)
| import sms_generator
import sms_twilio
import db
import config
list_of_opportunities = []
def get_friendly_ref(id):
if not list_of_opportunities:
new_ref = 1
new_opportunity = dict(id=str(id), ref=new_ref)
list_of_opportunities.append(new_opportunity)
print(str.format("New ref is {0}", new_ref))
return new_ref
else:
temp_list = []
for opp in list_of_opportunities:
temp_list.append(opp['ref'])
new_ref = max(temp_list) + 1
new_opportunity = dict(id=str(id), ref=new_ref)
list_of_opportunities.append(new_opportunity)
print(str.format("New opportunity added {0}", new_opportunity))
return new_ref
def remove_unique_ref(ref):
print(str.format("Removing ref {0}", ref))
list_of_opportunities.pop(str(ref), None)
def broadcast_procedure(procedure, location, duration, doctor, ref_id):
message_ref = get_friendly_ref(ref_id)
message = sms_generator.new_procedure_message(procedure, location, duration, doctor, message_ref)
recipients = db.get_all_students()
print(recipients)
for recipient in recipients:
print("Sending SMS")
print(recipient)
sms_twilio.send_sms(recipient['phone_number'], config.twilio_number, message)
| Add code for automatically assigning message_refs | Add code for automatically assigning message_refs
| Python | mit | nhshd-slot/SLOT,bsharif/SLOT,bsharif/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT,bsharif/SLOT | ---
+++
@@ -3,9 +3,38 @@
import db
import config
+list_of_opportunities = []
-def broadcast_procedure(procedure, location, duration, doctor):
- message = sms_generator.new_procedure_message(procedure, location, duration, doctor)
+
+def get_friendly_ref(id):
+ if not list_of_opportunities:
+ new_ref = 1
+ new_opportunity = dict(id=str(id), ref=new_ref)
+ list_of_opportunities.append(new_opportunity)
+ print(str.format("New ref is {0}", new_ref))
+ return new_ref
+
+ else:
+ temp_list = []
+ for opp in list_of_opportunities:
+ temp_list.append(opp['ref'])
+
+ new_ref = max(temp_list) + 1
+ new_opportunity = dict(id=str(id), ref=new_ref)
+ list_of_opportunities.append(new_opportunity)
+
+ print(str.format("New opportunity added {0}", new_opportunity))
+ return new_ref
+
+
+def remove_unique_ref(ref):
+ print(str.format("Removing ref {0}", ref))
+ list_of_opportunities.pop(str(ref), None)
+
+
+def broadcast_procedure(procedure, location, duration, doctor, ref_id):
+ message_ref = get_friendly_ref(ref_id)
+ message = sms_generator.new_procedure_message(procedure, location, duration, doctor, message_ref)
recipients = db.get_all_students()
print(recipients) |
c21bea6d80287d7c42b3634a7612e4e8cbc419be | plotnine/exceptions.py | plotnine/exceptions.py | from textwrap import dedent
import warnings
# Statsmodels is slow to fix upstream future warnings
# This module is imported before the stats module so
# so any FutureWarnings with the imports are suppressed
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='statsmodels')
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='pandas')
# These are rare
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='scipy')
class PlotnineError(Exception):
"""
Exception for ggplot errors
"""
def __init__(self, *args):
args = [dedent(arg) for arg in args]
self.message = " ".join(args)
def __str__(self):
return repr(self.message)
class PlotnineWarning(UserWarning):
"""
Warnings for ggplot inconsistencies
"""
pass
| from textwrap import dedent
import warnings
# Statsmodels is slow to fix upstream future warnings
# This module is imported before the stats module so
# so any FutureWarnings with the imports are suppressed
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='statsmodels')
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='pandas')
# These are rare
warnings.filterwarnings(
'ignore',
category=FutureWarning,
module='scipy')
# Show the warnings on one line, leaving out any code makes the
# message clear
def warning_format(message, category, filename, lineno, file=None, line=None):
fmt = '{}:{}: {}: {}\n'.format
return fmt(filename, lineno, category.__name__, message)
warnings.formatwarning = warning_format
class PlotnineError(Exception):
"""
Exception for ggplot errors
"""
def __init__(self, *args):
args = [dedent(arg) for arg in args]
self.message = " ".join(args)
def __str__(self):
return repr(self.message)
class PlotnineWarning(UserWarning):
"""
Warnings for ggplot inconsistencies
"""
pass
| Print warnings on one line & no code | ENH: Print warnings on one line & no code
| Python | mit | has2k1/plotnine,has2k1/plotnine | ---
+++
@@ -21,6 +21,16 @@
module='scipy')
+# Show the warnings on one line, leaving out any code makes the
+# message clear
+def warning_format(message, category, filename, lineno, file=None, line=None):
+ fmt = '{}:{}: {}: {}\n'.format
+ return fmt(filename, lineno, category.__name__, message)
+
+
+warnings.formatwarning = warning_format
+
+
class PlotnineError(Exception):
"""
Exception for ggplot errors |
9c396e6f75bac114068421740b7faedbe3915966 | hermes/views.py | hermes/views.py | from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('created_on')
class CategoryPostListView(PostListView):
slug = None
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
| from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('created_on')
class CategoryPostListView(PostListView):
slug = None
def get_queryset(self):
category_slug = self.kwargs.get('slug', None)
if category_slug is None:
category_slug = self.slug
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
| Allow slug to be passed directly into the view | Allow slug to be passed directly into the view | Python | mit | emilian/django-hermes | ---
+++
@@ -16,7 +16,11 @@
slug = None
def get_queryset(self):
- category_slug = self.kwargs.get('slug', '')
+ category_slug = self.kwargs.get('slug', None)
+
+ if category_slug is None:
+ category_slug = self.slug
+
return self.model.objects.in_category(category_slug)
|
70d7058e5a45699082dc5a834e462f70ebe30345 | h5py/tests/test_h5o.py | h5py/tests/test_h5o.py | import pytest
from .common import TestCase
from h5py import File
class TestException(Exception):
pass
def throwing(name, obj):
print(name, obj)
raise TestException("throwing exception")
class TestVisit(TestCase):
def test_visit(self):
fname = self.mktemp()
fid = File(fname, 'w')
fid.create_dataset('foo', (100,), dtype='uint8')
with pytest.raises(TestException, match='throwing exception'):
fid.visititems(throwing)
fid.close()
| import pytest
from .common import TestCase
from h5py import File
class SampleException(Exception):
pass
def throwing(name, obj):
print(name, obj)
raise SampleException("throwing exception")
class TestVisit(TestCase):
def test_visit(self):
fname = self.mktemp()
fid = File(fname, 'w')
fid.create_dataset('foo', (100,), dtype='uint8')
with pytest.raises(SampleException, match='throwing exception'):
fid.visititems(throwing)
fid.close()
| Rename exception to squash test warning | Rename exception to squash test warning
| Python | bsd-3-clause | h5py/h5py,h5py/h5py,h5py/h5py | ---
+++
@@ -4,18 +4,18 @@
from h5py import File
-class TestException(Exception):
+class SampleException(Exception):
pass
def throwing(name, obj):
print(name, obj)
- raise TestException("throwing exception")
+ raise SampleException("throwing exception")
class TestVisit(TestCase):
def test_visit(self):
fname = self.mktemp()
fid = File(fname, 'w')
fid.create_dataset('foo', (100,), dtype='uint8')
- with pytest.raises(TestException, match='throwing exception'):
+ with pytest.raises(SampleException, match='throwing exception'):
fid.visititems(throwing)
fid.close() |
aa867937c1e9842bd58c4006d4497589132a5c92 | heat/objects/fields.py | heat/objects/fields.py | # Copyright 2014 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from oslo_versionedobjects import fields
import six
class Json(fields.FieldType):
def coerce(self, obj, attr, value):
if isinstance(value, six.string_types):
loaded = json.loads(value)
return loaded
return value
def from_primitive(self, obj, attr, value):
return self.coerce(obj, attr, value)
def to_primitive(self, obj, attr, value):
return json.dumps(value)
class JsonField(fields.AutoTypedField):
pass
class ListField(fields.AutoTypedField):
pass
| # Copyright 2014 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from oslo_versionedobjects import fields
import six
class Json(fields.FieldType):
def coerce(self, obj, attr, value):
if isinstance(value, six.string_types):
loaded = json.loads(value)
return loaded
return value
def from_primitive(self, obj, attr, value):
return self.coerce(obj, attr, value)
def to_primitive(self, obj, attr, value):
return json.dumps(value)
class JsonField(fields.AutoTypedField):
AUTO_TYPE = Json()
class ListField(fields.AutoTypedField):
AUTO_TYPE = fields.List(fields.FieldType())
| Make JsonField actually do it's work | Make JsonField actually do it's work
This code will be used when objects are registered.
Change-Id: I2f1b3e2dd5584335e318b936984c227050bbe699
Partial-Bug: 1491258
| Python | apache-2.0 | cwolferh/heat-scratch,noironetworks/heat,openstack/heat,steveb/heat,steveb/heat,dims/heat,jasondunsmore/heat,jasondunsmore/heat,cwolferh/heat-scratch,openstack/heat,dims/heat,noironetworks/heat | ---
+++
@@ -33,8 +33,8 @@
class JsonField(fields.AutoTypedField):
- pass
+ AUTO_TYPE = Json()
class ListField(fields.AutoTypedField):
- pass
+ AUTO_TYPE = fields.List(fields.FieldType()) |
406abe34d82aeb7d72d7f4dc96d44d57807f928b | devilry/devilry_authenticate/urls.py | devilry/devilry_authenticate/urls.py | from devilry.devilry_authenticate.views import CustomLoginView
from django.conf.urls import url
from django_cradmin.apps.cradmin_authenticate.views import logout
urlpatterns = [
url(r'^login$', CustomLoginView.as_view(), name='cradmin-authenticate-login'),
url(r'^logout$', logout.cradmin_logoutview, name='cradmin-authenticate-logout'),
]
| from devilry.devilry_authenticate.views import CustomLoginView
from django.conf.urls import url
from django_cradmin.apps.cradmin_authenticate.views import logout
urlpatterns = [
url(r'^login$', CustomLoginView.as_view(), name='cradmin-authenticate-login'),
url(r'^logout$', logout.cradmin_logoutview, name='cradmin-authenticate-logout'),
# url(r'^feide/oauth-successful-login-callback', ...),
]
| Add placeholder for feide oauth callback. | authenticate: Add placeholder for feide oauth callback.
| Python | bsd-3-clause | devilry/devilry-django,devilry/devilry-django,devilry/devilry-django,devilry/devilry-django | ---
+++
@@ -6,4 +6,5 @@
urlpatterns = [
url(r'^login$', CustomLoginView.as_view(), name='cradmin-authenticate-login'),
url(r'^logout$', logout.cradmin_logoutview, name='cradmin-authenticate-logout'),
+ # url(r'^feide/oauth-successful-login-callback', ...),
] |
09d3ba64bfdb9d8d9a46e12ab9770e4a5f86ea20 | sendsms/backends/twiliorest.py | sendsms/backends/twiliorest.py | #-*- coding: utf-8 -*-
"""
this backend requires the twilio python library: http://pypi.python.org/pypi/twilio/
"""
from twilio.rest import TwilioRestClient
from django.conf import settings
from sendsms.backends.base import BaseSmsBackend
TWILIO_ACCOUNT_SID = getattr(settings, 'SENDSMS_TWILIO_ACCOUNT_SID', '')
TWILIO_AUTH_TOKEN = getattr(settings, 'SENDSMS_TWILIO_AUTH_TOKEN', '')
class SmsBackend(BaseSmsBackend):
def send_messages(self, messages):
client = TwilioRestClient(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)
for message in messages:
for to in message.to:
try:
msg = client.sms.messages.create(
to=to,
from_=message.from_phone,
body=message.body
)
except:
if not self.fail_silently:
raise | #-*- coding: utf-8 -*-
"""
this backend requires the twilio python library: http://pypi.python.org/pypi/twilio/
"""
from twilio.rest import TwilioRestClient
from django.conf import settings
from sendsms.backends.base import BaseSmsBackend
TWILIO_ACCOUNT_SID = getattr(settings, 'SENDSMS_TWILIO_ACCOUNT_SID', '')
TWILIO_AUTH_TOKEN = getattr(settings, 'SENDSMS_TWILIO_AUTH_TOKEN', '')
class SmsBackend(BaseSmsBackend):
def send_messages(self, messages):
client = TwilioRestClient(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)
for message in messages:
for to in message.to:
try:
msg = client.messages.create(
to=to,
from_=message.from_phone,
body=message.body
)
except:
if not self.fail_silently:
raise
| Use newer twilio API to fix 160 character limit | Use newer twilio API to fix 160 character limit
See http://stackoverflow.com/questions/22028278/send-sms-of-more-than-160-characters-in-python-using-twilio | Python | mit | stefanfoulis/django-sendsms | ---
+++
@@ -15,7 +15,7 @@
for message in messages:
for to in message.to:
try:
- msg = client.sms.messages.create(
+ msg = client.messages.create(
to=to,
from_=message.from_phone,
body=message.body |
d39066a1de376e0c824510592c1e5e0c5edc46a3 | hitcounter.py | hitcounter.py | from flask import Flask
from redis import Redis
import os
hitcounter = Flask(__name__)
redis = Redis(host='redis', port=6379)
who = os.getenv('WHO', 'World')
@hitcounter.route('/')
def hello():
redis.incr('hits')
return 'Hello %s! I have been seen %s times.' % (who,redis.get('hits'))
if __name__ == "__main__":
hitcounter.run(host="0.0.0.0", port=80)
| from flask import Flask
from redis import Redis
import os
hitcounter = Flask(__name__)
redis = Redis(host='redis', port=6379)
who = os.getenv('WHO', 'World')
@hitcounter.route('/')
def hello():
redis.incr('hits')
return '<strong>Hello %s! I have been seen %s times.</strong>' % (who,redis.get('hits'))
if __name__ == "__main__":
hitcounter.run(host="0.0.0.0", port=80)
| Change font style to <strong> | Change font style to <strong>
| Python | bsd-2-clause | kkonstan/hitcounter | ---
+++
@@ -9,7 +9,7 @@
@hitcounter.route('/')
def hello():
redis.incr('hits')
- return 'Hello %s! I have been seen %s times.' % (who,redis.get('hits'))
+ return '<strong>Hello %s! I have been seen %s times.</strong>' % (who,redis.get('hits'))
if __name__ == "__main__":
hitcounter.run(host="0.0.0.0", port=80) |
c7ac2b1805bd82bdeef3227fc34149431c161df7 | yesimeanit/showoff/newsletter_subscriptions/admin.py | yesimeanit/showoff/newsletter_subscriptions/admin.py | from django.contrib import admin
from .models import NewsletterSubscription
admin.site.register(NewsletterSubscription,
list_display=('email', 'is_active', 'confirmed_on', 'unsubscribed_on'),
list_filter=('is_active',),
)
| from django.contrib import admin
from .models import NewsletterSubscription
admin.site.register(NewsletterSubscription,
list_display=('email', 'is_active', 'confirmed_on', 'unsubscribed_on'),
list_filter=('is_active',),
search_fields=('code', 'email', 'first_name', 'last_name'),
)
| Add search fields for newsletter subscriptions | Add search fields for newsletter subscriptions
| Python | bsd-3-clause | guetux/django-yesimeanit | ---
+++
@@ -6,4 +6,5 @@
admin.site.register(NewsletterSubscription,
list_display=('email', 'is_active', 'confirmed_on', 'unsubscribed_on'),
list_filter=('is_active',),
+ search_fields=('code', 'email', 'first_name', 'last_name'),
) |
d5e3d6c3ca285f1037f284cfb78e279c2d1032ec | dojopuzzles/core/urls.py | dojopuzzles/core/urls.py | from django.urls import path
from core import views
app_name = "core"
urlpatterns = [
path("home/", views.home, name="home"),
path("about/", views.about, name="about"),
]
| from core import views
from django.urls import path
app_name = "core"
urlpatterns = [
path("", views.home, name="home"),
path("about/", views.about, name="about"),
]
| Fix route for main page | Fix route for main page
| Python | mit | rennerocha/dojopuzzles | ---
+++
@@ -1,11 +1,9 @@
+from core import views
from django.urls import path
-
-from core import views
-
app_name = "core"
urlpatterns = [
- path("home/", views.home, name="home"),
+ path("", views.home, name="home"),
path("about/", views.about, name="about"),
] |
9a85df62fa7023a5e8155639a118fd4823895ae3 | honeybadger/middleware.py | honeybadger/middleware.py | from honeybadger import honeybadger
class DjangoHoneybadgerMiddleware(object):
def __init__(self):
from django.conf import settings
config_kwargs = dict([(k.lower(), v) for k, v in getattr(settings, 'HONEYBADGER', {}).items()])
honeybadger.configure(**config_kwargs)
honeybadger.config.set_12factor_config() # environment should override Django settings
def process_request(self, request):
honeybadger.begin_request(request)
return None
def process_exception(self, request, exception):
honeybadger.notify(exception)
return None
def process_response(self, request, response):
honeybadger.reset_context()
return response
# TODO: finish Flask support
class FlaskHoneybadgerMiddleware(object):
def __init__(self, app, **kwargs):
raise NotImplemented
# return app
| from honeybadger import honeybadger
class DjangoHoneybadgerMiddleware(object):
def __init__(self):
from django.conf import settings
if getattr(settings, 'DEBUG'):
honeybadger.configure(environment='development')
config_kwargs = dict([(k.lower(), v) for k, v in getattr(settings, 'HONEYBADGER', {}).items()])
honeybadger.configure(**config_kwargs)
honeybadger.config.set_12factor_config() # environment should override Django settings
def process_request(self, request):
honeybadger.begin_request(request)
return None
def process_exception(self, request, exception):
honeybadger.notify(exception)
return None
def process_response(self, request, response):
honeybadger.reset_context()
return response
# TODO: finish Flask support
class FlaskHoneybadgerMiddleware(object):
def __init__(self, app, **kwargs):
raise NotImplemented
# return app
| Set environment to development if Django is in DEBUG mode. | Set environment to development if Django is in DEBUG mode.
| Python | mit | honeybadger-io/honeybadger-python,honeybadger-io/honeybadger-python | ---
+++
@@ -3,6 +3,8 @@
class DjangoHoneybadgerMiddleware(object):
def __init__(self):
from django.conf import settings
+ if getattr(settings, 'DEBUG'):
+ honeybadger.configure(environment='development')
config_kwargs = dict([(k.lower(), v) for k, v in getattr(settings, 'HONEYBADGER', {}).items()])
honeybadger.configure(**config_kwargs)
honeybadger.config.set_12factor_config() # environment should override Django settings |
326bebb58242981ec66f257525e5c5f58fae9196 | example/article/admin.py | example/article/admin.py | from django.contrib import admin
from django.contrib.admin.widgets import AdminTextInputWidget, AdminTextareaWidget
from parler.admin import TranslatableAdmin
from .models import Article
from parler.forms import TranslatableModelForm, TranslatedField
class ArticleAdminForm(TranslatableModelForm):
"""
Example form
Translated fields can be enhanced by manually declaring them:
"""
title = TranslatedField(widget=AdminTextInputWidget)
content = TranslatedField(widget=AdminTextareaWidget)
class ArticleAdmin(TranslatableAdmin):
"""
Example admin.
Using an empty class would already work,
but this example shows some additional options.
"""
# The 'language_column' is provided by the base class:
list_display = ('title', 'language_column')
# Example custom form usage.
form = ArticleAdminForm
# NOTE: when using Django 1.4, use declared_fieldsets= instead of fieldsets=
fieldsets = (
(None, {
'fields': ('title', 'slug', 'published'),
}),
("Contents", {
'fields': ('content',),
})
)
def get_prepopulated_fields(self, request, obj=None):
# Can't use prepopulated_fields= yet, but this is a workaround.
return {'slug': ('title',)}
admin.site.register(Article, ArticleAdmin)
| from django.contrib import admin
from django.contrib.admin.widgets import AdminTextInputWidget, AdminTextareaWidget
from parler.admin import TranslatableAdmin
from .models import Article
from parler.forms import TranslatableModelForm, TranslatedField
class ArticleAdminForm(TranslatableModelForm):
"""
Example form
Translated fields can be enhanced by manually declaring them:
"""
title = TranslatedField(widget=AdminTextInputWidget)
content = TranslatedField(widget=AdminTextareaWidget)
class ArticleAdmin(TranslatableAdmin):
"""
Example admin.
Using an empty class would already work,
but this example shows some additional options.
"""
# The 'language_column' is provided by the base class:
list_display = ('title', 'language_column')
list_filter = ('published',)
# Example custom form usage.
form = ArticleAdminForm
# NOTE: when using Django 1.4, use declared_fieldsets= instead of fieldsets=
fieldsets = (
(None, {
'fields': ('title', 'slug', 'published'),
}),
("Contents", {
'fields': ('content',),
})
)
def get_prepopulated_fields(self, request, obj=None):
# Can't use prepopulated_fields= yet, but this is a workaround.
return {'slug': ('title',)}
admin.site.register(Article, ArticleAdmin)
| Add list_filter to example ArticleAdmin | Add list_filter to example ArticleAdmin
When using a list filter and then adding or editing an object the
language GET parameter goes missing causing the wrong translation to be
edited.
| Python | apache-2.0 | django-parler/django-parler,jrief/django-parler,edoburu/django-parler,HiddenData/django-parler,skirsdeda/django-parler,edoburu/django-parler,zhangguiyu/django-parler,jrief/django-parler,django-parler/django-parler,imposeren/django-parler,defivelo/django-parler,imposeren/django-parler,zhangguiyu/django-parler,defivelo/django-parler,ellmetha/django-parler,HiddenData/django-parler,skirsdeda/django-parler,ellmetha/django-parler | ---
+++
@@ -25,6 +25,7 @@
# The 'language_column' is provided by the base class:
list_display = ('title', 'language_column')
+ list_filter = ('published',)
# Example custom form usage.
form = ArticleAdminForm |
bda9c03e40315f4050477463b715fab038a96a1e | examples/pystray_icon.py | examples/pystray_icon.py | from PIL import Image
from pystray import Icon, Menu, MenuItem
import webview
import sys
if sys.platform == 'darwin':
# System tray icon needs to run in it's own process on Mac OS X
from multiprocessing import Process as Thread, Queue
else:
from threading import Thread
from queue import Queue
"""
This example demonstrates running pywebview alongside with pystray to display a system tray icon.
"""
def run_webview():
window = webview.create_window('Webview', 'https://pywebview.flowrl.com/hello')
webview.start()
return window
def run_pystray(queue: Queue):
def on_open(icon, item):
queue.put('open')
def on_exit(icon, item):
icon.stop()
queue.put('exit')
image = Image.open("logo/logo.png")
menu = Menu(MenuItem('Open', on_open), MenuItem('Exit', on_exit))
icon = Icon("Pystray", image, "Pystray", menu)
icon.run()
if __name__ == '__main__':
queue = Queue()
icon_thread = Thread(target=run_pystray, args=(queue,))
icon_thread.start()
window = run_webview()
while True:
event = queue.get()
if event == 'open':
if window.closed.is_set():
window = run_webview()
if event == 'exit':
if not window.closed.is_set():
window.destroy()
break
icon_thread.join()
| from PIL import Image
from pystray import Icon, Menu, MenuItem
import webview
import sys
if sys.platform == 'darwin':
# System tray icon needs to run in it's own process on Mac OS X
import multiprocessing
from multiprocessing import Process as Thread, Queue
multiprocessing.set_start_method('spawn')
else:
from threading import Thread
from queue import Queue
"""
This example demonstrates running pywebview alongside with pystray to display a system tray icon.
"""
def run_webview():
window = webview.create_window('Webview', 'https://pywebview.flowrl.com/hello')
webview.start()
def run_pystray(queue: Queue):
def on_open(icon, item):
queue.put('open')
def on_exit(icon, item):
icon.stop()
queue.put('exit')
image = Image.open("logo/logo.png")
menu = Menu(MenuItem('Open', on_open), MenuItem('Exit', on_exit))
icon = Icon("Pystray", image, "Pystray", menu)
icon.run()
if __name__ == '__main__':
queue = Queue()
icon_thread = Thread(target=run_pystray, args=(queue,))
icon_thread.start()
run_webview()
while True:
event = queue.get()
if event == 'open':
run_webview()
if event == 'exit':
break
icon_thread.join()
| Fix process spawn on Mac os, simplify logic | Fix process spawn on Mac os, simplify logic
| Python | bsd-3-clause | r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview,r0x0r/pywebview | ---
+++
@@ -5,7 +5,9 @@
import sys
if sys.platform == 'darwin':
# System tray icon needs to run in it's own process on Mac OS X
+ import multiprocessing
from multiprocessing import Process as Thread, Queue
+ multiprocessing.set_start_method('spawn')
else:
from threading import Thread
from queue import Queue
@@ -19,7 +21,6 @@
def run_webview():
window = webview.create_window('Webview', 'https://pywebview.flowrl.com/hello')
webview.start()
- return window
def run_pystray(queue: Queue):
@@ -43,16 +44,13 @@
icon_thread = Thread(target=run_pystray, args=(queue,))
icon_thread.start()
- window = run_webview()
+ run_webview()
while True:
event = queue.get()
if event == 'open':
- if window.closed.is_set():
- window = run_webview()
+ run_webview()
if event == 'exit':
- if not window.closed.is_set():
- window.destroy()
break
icon_thread.join() |
d879afb2ae12f4d217632fbe253f87c3db1673a7 | tests/test_gen_sql.py | tests/test_gen_sql.py | #!/usr/bin/env python
import sys
from io import StringIO
from pg_bawler import gen_sql
def test_simple_main(monkeypatch):
stdout = StringIO()
monkeypatch.setattr(sys, 'stdout', stdout)
class Args:
tablename = 'foo'
gen_sql.main(*[Args.tablename])
sql = stdout.getvalue()
assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
| #!/usr/bin/env python
import sys
from io import StringIO
from pg_bawler import gen_sql
def test_simple_main(monkeypatch):
stdout = StringIO()
monkeypatch.setattr(sys, 'stdout', stdout)
class Args:
tablename = 'foo'
gen_sql.main(*[Args.tablename])
sql = stdout.getvalue()
assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
def test_no_drop(monkeypatch):
stdout = StringIO()
monkeypatch.setattr(sys, 'stdout', stdout)
gen_sql.main('--no-drop', 'foo')
sql = stdout.getvalue()
assert 'DROP' not in sql
| Add absolutely stupid test for only-drop gen_sql | Add absolutely stupid test for only-drop gen_sql
| Python | bsd-3-clause | beezz/pg_bawler,beezz/pg_bawler | ---
+++
@@ -17,3 +17,11 @@
assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
+
+
+def test_no_drop(monkeypatch):
+ stdout = StringIO()
+ monkeypatch.setattr(sys, 'stdout', stdout)
+ gen_sql.main('--no-drop', 'foo')
+ sql = stdout.getvalue()
+ assert 'DROP' not in sql |
abefbbc99e7e62bed31db549519807feee7254f9 | tests/test_machine.py | tests/test_machine.py | import rml.machines
def test_machine_load_elements():
lattice = rml.machines.get_elements(machine='SRI21', elemType='BPM')
assert len(lattice) == 173
| import rml.machines
def test_machine_load_elements():
lattice = rml.machines.get_elements(machine='SRI21', elemType='BPM')
assert len(lattice) == 173
for element in lattice.get_elements():
assert element.get_pv_name('readback')
| Test to get different pv names for an element | Test to get different pv names for an element
| Python | apache-2.0 | razvanvasile/RML,willrogers/pml,willrogers/pml | ---
+++
@@ -4,3 +4,5 @@
def test_machine_load_elements():
lattice = rml.machines.get_elements(machine='SRI21', elemType='BPM')
assert len(lattice) == 173
+ for element in lattice.get_elements():
+ assert element.get_pv_name('readback') |
df4bd8201c7c3651fe045b69b1ef8772829b811d | kegweb/kegweb/forms.py | kegweb/kegweb/forms.py | from django import forms
from registration.models import RegistrationProfile
from registration.forms import RegistrationForm
from pykeg.core.models import UserProfile
class KegbotRegistrationForm(RegistrationForm):
gender = forms.CharField()
weight = forms.IntegerField()
def save(self, profile_callback=None):
new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],
password=self.cleaned_data['password1'],
email=self.cleaned_data['email'],
send_email=False,
profile_callback=profile_callback)
new_user.is_active = True
new_user.save()
new_profile = UserProfile.objects.create(user=new_user,
gender=self.cleaned_data['gender'],
weight=self.cleaned_data['weight'])
new_profile.save()
return new_user
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('gender', 'weight')
| from django import forms
from registration.models import RegistrationProfile
from registration.forms import RegistrationForm
from pykeg.core.models import UserProfile
class KegbotRegistrationForm(RegistrationForm):
gender = forms.CharField()
weight = forms.IntegerField()
def save(self, profile_callback=None):
new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],
password=self.cleaned_data['password1'],
email=self.cleaned_data['email'],
send_email=False,
profile_callback=profile_callback)
new_user.is_active = True
new_user.save()
new_profile, is_new = UserProfile.objects.get_or_create(user=new_user)
new_profile.gender = self.cleaned_data['gender']
new_profile.weight = self.cleaned_data['weight']
new_profile.save()
return new_user
class UserProfileForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('gender', 'weight')
| Fix error on new user registration. | Fix error on new user registration.
| Python | mit | Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server | ---
+++
@@ -17,9 +17,9 @@
profile_callback=profile_callback)
new_user.is_active = True
new_user.save()
- new_profile = UserProfile.objects.create(user=new_user,
- gender=self.cleaned_data['gender'],
- weight=self.cleaned_data['weight'])
+ new_profile, is_new = UserProfile.objects.get_or_create(user=new_user)
+ new_profile.gender = self.cleaned_data['gender']
+ new_profile.weight = self.cleaned_data['weight']
new_profile.save()
return new_user
|
aae16acb38649d2af6d80453b82b2e460e36efdd | form_utils_bootstrap3/tests/__init__.py | form_utils_bootstrap3/tests/__init__.py | import os
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'bootstrap3',
'form_utils',
],
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
MEDIA_ROOT=os.path.join(os.path.dirname(__file__), 'media'),
MEDIA_URL='/media/',
STATIC_URL='/static/',
MIDDLEWARE_CLASSES=[],
BOOTSTRAP3={
'form_renderers': {
'default': 'form_utils_bootstrap3.renderers.BetterFormRenderer'
}
}
)
if django.VERSION >= (1, 8):
settings_dict['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': []
}
]
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
| import os
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'bootstrap3',
'form_utils',
],
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
MEDIA_ROOT=os.path.join(os.path.dirname(__file__), 'media'),
MEDIA_URL='/media/',
STATIC_URL='/static/',
MIDDLEWARE_CLASSES=[],
BOOTSTRAP3={
'form_renderers': {
'default': 'form_utils_bootstrap3.renderers.BetterFormRenderer'
}
}
)
if django.VERSION >= (1, 8):
settings_dict['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': []
}
]
settings.configure(**settings_dict)
django.setup()
| Remove check for versions < 1.7 | Remove check for versions < 1.7
| Python | mit | federicobond/django-form-utils-bootstrap3 | ---
+++
@@ -38,6 +38,4 @@
settings.configure(**settings_dict)
-
-if django.VERSION >= (1, 7):
- django.setup()
+django.setup() |
1c54e4233db3231bfd20bd6591c2747ed8b7868d | gopher/models.py | gopher/models.py | from django.db import models
class AirtimeApplication(models.Model):
name = models.CharField(max_length=50)
ratio = models.IntegerField(null=True, blank=True)
max_per_day = models.IntegerField(null=True, blank=True)
amount = models.IntegerField(null=True, blank=True)
active = models.BooleanField()
product_key = models.CharField(max_length=10)
def __unicode__(self):
return self.name
class Meta:
verbose_name = "Airtime Application"
class SendAirtime(models.Model):
app_id = models.ForeignKey(AirtimeApplication)
msisdn = models.CharField(max_length=30,
verbose_name="MSISDN (Mobile Number)")
product_key = models.CharField(max_length=10)
amount = models.IntegerField()
sent = models.BooleanField()
created_at = models.DateTimeField(auto_now_add=True,
blank=False,
editable=False)
def __unicode__(self):
return self.msisdn
class Meta:
verbose_name = "Send Airtime"
| from django.db import models
class AirtimeApplication(models.Model):
name = models.CharField(max_length=50)
ratio = models.IntegerField(null=True, blank=True)
max_per_day = models.IntegerField(null=True, blank=True)
amount = models.IntegerField(null=True, blank=True)
active = models.BooleanField()
product_key = models.CharField(max_length=10)
def __unicode__(self):
return self.name
class Meta:
verbose_name = "Airtime Application"
class SendAirtime(models.Model):
app_id = models.ForeignKey(AirtimeApplication)
msisdn = models.CharField(max_length=30,
verbose_name="MSISDN (Mobile Number)")
product_key = models.CharField(max_length=10)
amount = models.IntegerField()
sent = models.BooleanField()
created_at = models.DateTimeField(auto_now_add=True,
blank=False,
editable=False)
def __unicode__(self):
return self.msisdn
class Meta:
verbose_name_plural = "Send Airtime"
| Fix application name in admin | Fix application name in admin
| Python | mit | westerncapelabs/django-grs-gatewaycms | ---
+++
@@ -31,4 +31,4 @@
return self.msisdn
class Meta:
- verbose_name = "Send Airtime"
+ verbose_name_plural = "Send Airtime" |
7e22cba69ddfcc523f4945c36470983b436faf3c | larvae/tests/test_bill.py | larvae/tests/test_bill.py | from ..bill import Bill
from validictory import ValidationError
def toy_bill():
b = Bill(bill_id="HB 2017",
session="2012A",
title="A bill for an act to raise the cookie budget by 200%",
type="bill")
b.add_source("http://uri.example.com/", note="foo")
b.validate()
return b
def test_basic_invalid_bill():
""" Test that we can create an invalid bill, and validation will fail """
b = toy_bill()
b.bill_id = None
try:
assert ("Big Garbage String") == b.validate()
except ValidationError:
pass
def test_verify_actions():
""" Make sure actions work """
b = toy_bill()
b.add_action("Some dude liked it.", "some dude", "2013-04-29")
b.validate()
| from ..bill import Bill
from validictory import ValidationError
def toy_bill():
b = Bill(bill_id="HB 2017",
session="2012A",
title="A bill for an act to raise the cookie budget by 200%",
type="bill")
b.add_source("http://uri.example.com/", note="foo")
b.validate()
return b
def test_basic_invalid_bill():
""" Test that we can create an invalid bill, and validation will fail """
b = toy_bill()
b.bill_id = None
try:
assert ("Big Garbage String") == b.validate()
except ValidationError:
pass
def test_verify_actions():
""" Make sure actions work """
b = toy_bill()
b.add_action("Some dude liked it.", "some dude", "2013-04-29")
b.validate()
# XXX: Check output
def test_verify_related_bill():
""" Make sure related bills work """
b = toy_bill()
b.add_related_bill(bill_id="HB 2020",
session="2011A",
chamber="upper",
relation="companion") # continuation?
b.validate()
| Add a related bill tester. | Add a related bill tester.
| Python | bsd-3-clause | AGarrow/larvae | ---
+++
@@ -27,3 +27,14 @@
b = toy_bill()
b.add_action("Some dude liked it.", "some dude", "2013-04-29")
b.validate()
+ # XXX: Check output
+
+
+def test_verify_related_bill():
+ """ Make sure related bills work """
+ b = toy_bill()
+ b.add_related_bill(bill_id="HB 2020",
+ session="2011A",
+ chamber="upper",
+ relation="companion") # continuation?
+ b.validate() |
b8a90774ff48a94abd6671245ff3b3f0fbf95507 | transmute/__init__.py | transmute/__init__.py | import sys
minver = (3,3,0)
if sys.version_info[0:3] < minver:
raise ImportError("transmute requires Python version {} or newer.".format('.'.join(('{}'.format(v) for v in minver))))
##
# @brief The application version number.
version = (0, 0, '1a')
version_string = '.'.join(str(v) for v in version) | import sys
minver = (3,3,0)
if sys.version_info[0:3] < minver:
raise ImportError("transmute requires Python version {} or newer.".format('.'.join(('{}'.format(v) for v in minver))))
##
# @brief The application version number.
version = (0, 0, '1a')
##
# @brief The dot-separated version number in string format.
version_string = '.'.join(str(v) for v in version)
| Add doxygen comment for version_string | Add doxygen comment for version_string
Add a doxygen comment for version_string describing its nature.
| Python | mit | Rakankou/transmute | ---
+++
@@ -9,4 +9,6 @@
# @brief The application version number.
version = (0, 0, '1a')
+##
+# @brief The dot-separated version number in string format.
version_string = '.'.join(str(v) for v in version) |
334e794be7514c032e6db4c39761d67820c405ff | oscar/management/commands/oscar_update_product_ratings.py | oscar/management/commands/oscar_update_product_ratings.py | # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.db.models import get_model
Product = get_model('catalogue', 'Product')
ProductReview = get_model('reviews', 'ProductReview')
class Command(BaseCommand):
help = """Update the denormalised reviews average on all Product instances.
Should only be necessary when changing to e.g. a weight-based
rating."""
def handle(self, *args, **options):
# Iterate over all Products (not just ones with reviews)
products = Product.objects.all()
for product in products:
ProductReview.update_product_rating(product)
self.stdout.write('Successfully updated %s products\n'
% products.count())
| # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.db.models import get_model
Product = get_model('catalogue', 'Product')
class Command(BaseCommand):
help = """Update the denormalised reviews average on all Product instances.
Should only be necessary when changing to e.g. a weight-based
rating."""
def handle(self, *args, **options):
# Iterate over all Products (not just ones with reviews)
products = Product.objects.all()
for product in products:
product.update_rating()
self.stdout.write(
'Successfully updated %s products\n' % products.count())
| Fix bug in management command for updating ratings | Fix bug in management command for updating ratings
| Python | bsd-3-clause | kapt/django-oscar,pasqualguerrero/django-oscar,jmt4/django-oscar,lijoantony/django-oscar,ahmetdaglarbas/e-commerce,dongguangming/django-oscar,nickpack/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,manevant/django-oscar,jinnykoo/wuyisj,eddiep1101/django-oscar,pdonadeo/django-oscar,taedori81/django-oscar,mexeniz/django-oscar,ka7eh/django-oscar,jinnykoo/wuyisj.com,QLGu/django-oscar,amirrpp/django-oscar,anentropic/django-oscar,okfish/django-oscar,bnprk/django-oscar,marcoantoniooliveira/labweb,michaelkuty/django-oscar,Bogh/django-oscar,makielab/django-oscar,adamend/django-oscar,bschuon/django-oscar,bschuon/django-oscar,faratro/django-oscar,manevant/django-oscar,thechampanurag/django-oscar,pdonadeo/django-oscar,manevant/django-oscar,jinnykoo/christmas,Idematica/django-oscar,itbabu/django-oscar,faratro/django-oscar,john-parton/django-oscar,Jannes123/django-oscar,thechampanurag/django-oscar,mexeniz/django-oscar,mexeniz/django-oscar,ademuk/django-oscar,taedori81/django-oscar,ahmetdaglarbas/e-commerce,nickpack/django-oscar,john-parton/django-oscar,kapari/django-oscar,nickpack/django-oscar,pasqualguerrero/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,jinnykoo/christmas,spartonia/django-oscar,adamend/django-oscar,dongguangming/django-oscar,solarissmoke/django-oscar,josesanch/django-oscar,WadeYuChen/django-oscar,ademuk/django-oscar,monikasulik/django-oscar,monikasulik/django-oscar,dongguangming/django-oscar,pasqualguerrero/django-oscar,faratro/django-oscar,WillisXChen/django-oscar,ademuk/django-oscar,sonofatailor/django-oscar,WillisXChen/django-oscar,jinnykoo/christmas,marcoantoniooliveira/labweb,amirrpp/django-oscar,ka7eh/django-oscar,QLGu/django-oscar,QLGu/django-oscar,vovanbo/django-oscar,thechampanurag/django-oscar,Idematica/django-oscar,rocopartners/django-oscar,eddiep1101/django-oscar,jinnykoo/wuyisj.com,nickpack/django-oscar,vovanbo/django-oscar,jlmadurga/django-oscar,elliotthill/django-oscar,anentropic/django-oscar,DrOc
togon/unwash_ecom,WillisXChen/django-oscar,jlmadurga/django-oscar,jlmadurga/django-oscar,makielab/django-oscar,DrOctogon/unwash_ecom,Bogh/django-oscar,WadeYuChen/django-oscar,saadatqadri/django-oscar,ka7eh/django-oscar,itbabu/django-oscar,eddiep1101/django-oscar,adamend/django-oscar,jinnykoo/wuyisj.com,rocopartners/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,adamend/django-oscar,machtfit/django-oscar,django-oscar/django-oscar,spartonia/django-oscar,MatthewWilkes/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,faratro/django-oscar,DrOctogon/unwash_ecom,jinnykoo/wuyisj,amirrpp/django-oscar,Jannes123/django-oscar,manevant/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,saadatqadri/django-oscar,kapt/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,sonofatailor/django-oscar,Bogh/django-oscar,MatthewWilkes/django-oscar,jmt4/django-oscar,MatthewWilkes/django-oscar,michaelkuty/django-oscar,ahmetdaglarbas/e-commerce,okfish/django-oscar,ahmetdaglarbas/e-commerce,thechampanurag/django-oscar,elliotthill/django-oscar,elliotthill/django-oscar,monikasulik/django-oscar,jinnykoo/wuyisj.com,django-oscar/django-oscar,michaelkuty/django-oscar,kapari/django-oscar,marcoantoniooliveira/labweb,MatthewWilkes/django-oscar,WadeYuChen/django-oscar,django-oscar/django-oscar,lijoantony/django-oscar,okfish/django-oscar,monikasulik/django-oscar,lijoantony/django-oscar,ka7eh/django-oscar,solarissmoke/django-oscar,sonofatailor/django-oscar,pasqualguerrero/django-oscar,spartonia/django-oscar,Idematica/django-oscar,binarydud/django-oscar,binarydud/django-oscar,jmt4/django-oscar,machtfit/django-oscar,sasha0/django-oscar,vovanbo/django-oscar,pdonadeo/django-oscar,Jannes123/django-oscar,rocopartners/django-oscar,okfish/django-oscar,anentropic/django-oscar,nfletton/django-oscar,jlmadurga/django-oscar,jinnykoo/wuyisj,anentropic/django-oscar,nfletton/django-oscar,bnprk/django-oscar,saadatqadri/django-oscar,john-parton/django-oscar,Jannes123/django-
oscar,john-parton/django-oscar,michaelkuty/django-oscar,sasha0/django-oscar,vovanbo/django-oscar,makielab/django-oscar,kapari/django-oscar,jinnykoo/wuyisj,nfletton/django-oscar,bnprk/django-oscar,QLGu/django-oscar,josesanch/django-oscar,bschuon/django-oscar,nfletton/django-oscar,dongguangming/django-oscar,saadatqadri/django-oscar,itbabu/django-oscar,lijoantony/django-oscar,kapari/django-oscar,kapt/django-oscar,Bogh/django-oscar,bnprk/django-oscar,sasha0/django-oscar,bschuon/django-oscar,amirrpp/django-oscar,ademuk/django-oscar,josesanch/django-oscar,makielab/django-oscar,sasha0/django-oscar,machtfit/django-oscar,WadeYuChen/django-oscar,binarydud/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,taedori81/django-oscar,marcoantoniooliveira/labweb,itbabu/django-oscar,pdonadeo/django-oscar | ---
+++
@@ -4,7 +4,6 @@
from django.db.models import get_model
Product = get_model('catalogue', 'Product')
-ProductReview = get_model('reviews', 'ProductReview')
class Command(BaseCommand):
@@ -16,6 +15,6 @@
# Iterate over all Products (not just ones with reviews)
products = Product.objects.all()
for product in products:
- ProductReview.update_product_rating(product)
- self.stdout.write('Successfully updated %s products\n'
- % products.count())
+ product.update_rating()
+ self.stdout.write(
+ 'Successfully updated %s products\n' % products.count()) |
ad70a7ec6543d64ec185eb2d52ccfa291a1dfad6 | servicerating/views.py | servicerating/views.py | import csv
from django.http import HttpResponse
from servicerating.models import Response
def report_responses(request):
qs = Response.objects.raw("SELECT servicerating_response.*, servicerating_extra.value AS clinic_code from servicerating_response INNER JOIN servicerating_extra ON servicerating_response.contact_id = servicerating_extra.contact_id WHERE servicerating_extra.key = 'clinic_code'")
# Create the HttpResponse object with the appropriate CSV header.
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="servicerating_incl_clinic_code.csv"'
writer = csv.writer(response)
writer.writerow(["Contact", "Key", "Value", "Created At", "Updated At", "Clinic Code"])
for obj in qs:
writer.writerow([obj.contact, obj.key, obj.value, obj.created_at,
obj.updated_at, obj.clinic_code])
return response
| import csv
from django.http import HttpResponse
from servicerating.models import Response
def report_responses(request):
qs = Response.objects.raw("SELECT servicerating_response.*, servicerating_extra.value AS clinic_code from servicerating_response INNER JOIN servicerating_extra ON servicerating_response.contact_id = servicerating_extra.contact_id WHERE servicerating_extra.key = 'clinic_code'")
# Create the HttpResponse object with the appropriate CSV header.
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="servicerating_incl_clinic_code.csv"'
writer = csv.writer(response)
writer.writerow(["Rating ID", "Contact ID", "Key", "Value", "Created At", "Updated At", "Clinic Code"])
for obj in qs:
writer.writerow([obj.id, obj.contact_id, obj.key, obj.value, obj.created_at,
obj.updated_at, obj.clinic_code])
return response
| Remove FK's from CSV export for massive speed boost | Remove FK's from CSV export for massive speed boost
| Python | bsd-3-clause | praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control | ---
+++
@@ -12,9 +12,9 @@
writer = csv.writer(response)
- writer.writerow(["Contact", "Key", "Value", "Created At", "Updated At", "Clinic Code"])
+ writer.writerow(["Rating ID", "Contact ID", "Key", "Value", "Created At", "Updated At", "Clinic Code"])
for obj in qs:
- writer.writerow([obj.contact, obj.key, obj.value, obj.created_at,
+ writer.writerow([obj.id, obj.contact_id, obj.key, obj.value, obj.created_at,
obj.updated_at, obj.clinic_code])
return response |
e9784ddaeea80311587ed7b255869d791008b29e | modder/gui/trayicon.py | modder/gui/trayicon.py | # coding: utf-8
import os.path
import platform
import wx
def create_menu_item(menu, label, func=None):
menu_item = wx.MenuItem(menu, -1, label)
if callable(func):
menu.Bind(wx.EVT_MENU, func, id=menu_item.GetId())
else:
menu_item.Enable(False)
menu.AppendItem(menu_item)
return menu_item
class TrayIcon(wx.TaskBarIcon):
icon_fpath = os.path.join(
os.path.dirname(__file__), 'resources', 'icons8-Module-64.png'
)
def __init__(self, frame=None):
super(TrayIcon, self).__init__()
self._frame = frame or wx.Frame(None)
self.SetIcon(wx.Icon(self.icon_fpath, wx.BITMAP_TYPE_PNG))
def CreatePopupMenu(self):
menu = wx.Menu()
create_menu_item(menu, 'Modder')
menu.AppendSeparator()
mods_count = wx.GetApp()._manager.count
create_menu_item(menu, '{:d} mods loaded'.format(mods_count))
create_menu_item(menu, 'Exit', self.on_exit)
return menu
def on_exit(self, evt):
wx.CallAfter(self.Destroy)
self._frame.Close()
| # coding: utf-8
import os.path
import platform
import wx
def create_menu_item(menu, label, func=None):
menu_item = wx.MenuItem(menu, -1, label)
if callable(func):
menu.Bind(wx.EVT_MENU, func, id=menu_item.GetId())
else:
menu_item.Enable(False)
menu.AppendItem(menu_item)
return menu_item
class TrayIcon(wx.TaskBarIcon):
icon_fpath = os.path.join(
os.path.dirname(__file__), 'resources', 'icons8-Module-64.png'
)
def __init__(self, frame=None):
super(TrayIcon, self).__init__()
self._frame = frame or wx.Frame(None)
self.SetIcon(wx.Icon(self.icon_fpath, wx.BITMAP_TYPE_PNG))
def CreatePopupMenu(self):
menu = wx.Menu()
create_menu_item(menu, 'Modder')
menu.AppendSeparator()
mods_count = wx.GetApp()._manager.count
create_menu_item(menu, '{:d} mods loaded'.format(mods_count), self.on_manage_mods)
create_menu_item(menu, 'Exit', self.on_exit)
return menu
def on_manage_mods(self, evt):
pass
def on_exit(self, evt):
wx.CallAfter(self.Destroy)
self._frame.Close()
| Add stub for mod manager GUI | Add stub for mod manager GUI
| Python | mit | JokerQyou/Modder2 | ---
+++
@@ -32,9 +32,12 @@
menu.AppendSeparator()
mods_count = wx.GetApp()._manager.count
- create_menu_item(menu, '{:d} mods loaded'.format(mods_count))
+ create_menu_item(menu, '{:d} mods loaded'.format(mods_count), self.on_manage_mods)
create_menu_item(menu, 'Exit', self.on_exit)
return menu
+
+ def on_manage_mods(self, evt):
+ pass
def on_exit(self, evt):
wx.CallAfter(self.Destroy) |
6439182a1ed9efd6dd08aefce8cca44221bb9cef | sf/mmck/controllers.py | sf/mmck/controllers.py | from sf.lib.orderedattrdict import OrderedAttrDict
class Controller(object):
def __init__(self, module, name):
self.module = module
self.name = name
@property
def ctl(self):
return self.module.controllers[self.name]
@property
def value(self):
return getattr(self.module, self.name)
@value.setter
def value(self, value):
setattr(self.module, self.name, value)
class Group(OrderedAttrDict):
def __setitem__(self, key, value, **kwargs):
if isinstance(value, tuple) and len(value) == 2:
module, name = value
value = Controller(module, name)
elif not isinstance(value, (Controller, Group)):
raise ValueError(
'Value must be a 2-tuple, Controller instance, '
'or Group instance'
)
super().__setitem__(key, value, **kwargs)
__all__ = [
'Controller',
'Group',
]
| from sf.lib.orderedattrdict import OrderedAttrDict
class Controller(object):
def __init__(self, module, name):
self.module = module
self.name = name
@property
def ctl(self):
c = self.module.controllers[self.name]
if hasattr(self.module, 'user_defined') and c.number >= 6:
c = self.module.user_defined[c.number - 6]
return c
@property
def value(self):
return getattr(self.module, self.name)
@value.setter
def value(self, value):
setattr(self.module, self.name, value)
class Group(OrderedAttrDict):
def __setitem__(self, key, value, **kwargs):
if isinstance(value, tuple) and len(value) == 2:
module, name = value
value = Controller(module, name)
elif not isinstance(value, (Controller, Group)):
raise ValueError(
'Value must be a 2-tuple, Controller instance, '
'or Group instance'
)
super().__setitem__(key, value, **kwargs)
__all__ = [
'Controller',
'Group',
]
| Return specific user-defined control, not proxy | Return specific user-defined control, not proxy
| Python | mit | metrasynth/solar-flares | ---
+++
@@ -9,7 +9,10 @@
@property
def ctl(self):
- return self.module.controllers[self.name]
+ c = self.module.controllers[self.name]
+ if hasattr(self.module, 'user_defined') and c.number >= 6:
+ c = self.module.user_defined[c.number - 6]
+ return c
@property
def value(self): |
9db378f028cf84d81165e7c50f62db794eb8fee2 | tests/query_test/test_chars.py | tests/query_test/test_chars.py | #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
| #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
import logging
import pytest
from copy import copy
from tests.common.test_vector import *
from tests.common.impala_test_suite import *
class TestStringQueries(ImpalaTestSuite):
@classmethod
def get_workload(cls):
return 'functional-query'
def setup_method(self, method):
self.__cleanup_char_tables()
self.__create_char_tables()
def teardown_method(self, method):
self.__cleanup_char_tables()
def __cleanup_char_tables(self):
self.client.execute('drop table if exists functional.test_char_tmp');
self.client.execute('drop table if exists functional.test_varchar_tmp');
def __create_char_tables(self):
self.client.execute(
'create table if not exists functional.test_varchar_tmp (vc varchar(5))')
self.client.execute(
'create table if not exists functional.test_char_tmp (c char(5))')
@classmethod
def add_test_dimensions(cls):
super(TestStringQueries, cls).add_test_dimensions()
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
v.get_value('table_format').file_format in ['text'] and
v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector)
| Fix char test to only run on test/none. | Fix char test to only run on test/none.
Change-Id: I8f5ac5a6e7399ce2fdbe78d07ae24deaa1d7532d
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4326
Tested-by: jenkins
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
| Python | apache-2.0 | michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,cloudera/Impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,cloudera/Impala | ---
+++
@@ -36,7 +36,8 @@
cls.TestMatrix.add_dimension(
create_exec_option_dimension(disable_codegen_options=[True]))
cls.TestMatrix.add_constraint(lambda v:\
- v.get_value('table_format').file_format in ['text'])
+ v.get_value('table_format').file_format in ['text'] and
+ v.get_value('table_format').compression_codec in ['none'])
def test_varchar(self, vector):
self.run_test_case('QueryTest/chars', vector) |
c1429d70319b6f39412f1f34297f706b20d78b80 | members/urls.py | members/urls.py | from django.conf.urls import patterns, url
urlpatterns = patterns('members.views',
url(r'^login/$', 'login', name='login'),
url(r'^search/(?P<name>.*)/$', 'search', name='search'),
)
| from django.conf.urls import patterns, url
urlpatterns = patterns('members.views',
url(r'^login/$', 'login', name='login'),
url(r'^search/(?P<name>.*)/$', 'search', name='search'),
url(r'^archive/$', 'archive_student_council', name='archive_student_council'),
)
| Add url leading to student council archive | Add url leading to student council archive
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | ---
+++
@@ -4,4 +4,5 @@
urlpatterns = patterns('members.views',
url(r'^login/$', 'login', name='login'),
url(r'^search/(?P<name>.*)/$', 'search', name='search'),
+ url(r'^archive/$', 'archive_student_council', name='archive_student_council'),
) |
d1aa094c9b0988c12100c8300aae4b390bb276f8 | zonetruck/__main__.py | zonetruck/__main__.py | import yaml
from zonetruck.WorkManager import WorkManager
from zonetruck.ZoneUpdater import ZoneUpdater
from zonetruck.ZoneFilter import ZoneFilter
from zonetruck.zone_xfer import zone_xfer
def main(argv):
config = yaml.safe_load(open(argv[1], 'r'))
zone_filter = ZoneFilter(config['filter_rules']).filter
zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']]
subsequent_tasks = [[zone_filter], zone_updaters]
work_manager = WorkManager()
for source in config['sources']:
for zone in source['zones']:
work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks)
work_manager.start()
work_manager.join()
if __name__ == '__main__':
import sys
main(sys.argv) | import yaml
from zonetruck.WorkManager import WorkManager
from zonetruck.ZoneUpdater import ZoneUpdater
from zonetruck.ZoneFilter import ZoneFilter
from zonetruck.zone_xfer import zone_xfer
import sys
def main(argv=None):
argv = argv or sys.argv
config = yaml.safe_load(open(argv[1], 'r'))
zone_filter = ZoneFilter(config['filter_rules']).filter
zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']]
subsequent_tasks = [[zone_filter], zone_updaters]
work_manager = WorkManager()
for source in config['sources']:
for zone in source['zones']:
work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks)
work_manager.start()
work_manager.join()
if __name__ == '__main__':
main() | Make argv arguments optional, fixes pip installed script | Make argv arguments optional, fixes pip installed script
| Python | mit | pv2b/zonetruck | ---
+++
@@ -3,8 +3,10 @@
from zonetruck.ZoneUpdater import ZoneUpdater
from zonetruck.ZoneFilter import ZoneFilter
from zonetruck.zone_xfer import zone_xfer
+import sys
-def main(argv):
+def main(argv=None):
+ argv = argv or sys.argv
config = yaml.safe_load(open(argv[1], 'r'))
zone_filter = ZoneFilter(config['filter_rules']).filter
@@ -22,5 +24,4 @@
work_manager.join()
if __name__ == '__main__':
- import sys
- main(sys.argv)
+ main() |
6ac868c83dc3d6ad0733e5155c97c23414b06b06 | lupa/tests/__init__.py | lupa/tests/__init__.py | from __future__ import absolute_import
import unittest
import doctest
import os
import lupa
def suite():
test_dir = os.path.abspath(os.path.dirname(__file__))
tests = []
for filename in os.listdir(test_dir):
if filename.endswith('.py') and not filename.startswith('_'):
tests.append('lupa.tests.' + filename[:-3])
suite = unittest.defaultTestLoader.loadTestsFromNames(tests)
suite.addTest(doctest.DocTestSuite(lupa._lupa))
suite.addTest(doctest.DocFileSuite('../../README.rst'))
return suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
| from __future__ import absolute_import
import unittest
import doctest
import os
import os.path as os_path
import sys
import lupa
def suite():
test_dir = os.path.abspath(os.path.dirname(__file__))
tests = []
for filename in os.listdir(test_dir):
if filename.endswith('.py') and not filename.startswith('_'):
tests.append('lupa.tests.' + filename[:-3])
suite = unittest.defaultTestLoader.loadTestsFromNames(tests)
suite.addTest(doctest.DocTestSuite(lupa._lupa))
# Long version of
# suite.addTest(doctest.DocFileSuite('../../README.rst'))
# to remove some platform specific tests.
readme_filename = 'README.rst'
readme_file = os_path.join(os_path.dirname(__file__), '..', '..', readme_filename)
with open(readme_file) as f:
readme = f.read()
if sys.platform != 'linux2':
# Exclude last section, which is Linux specific.
readme = readme.split('Importing Lua binary modules\n----------------------------\n', 1)[0]
parser = doctest.DocTestParser()
test = parser.get_doctest(readme, {'__file__': readme_file}, 'README.rst', readme_file, 0)
suite.addTest(doctest.DocFileCase(test))
return suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
| Exclude platform specific doctests on appveyor. | Exclude platform specific doctests on appveyor.
| Python | mit | pombredanne/lupa,pombredanne/lupa | ---
+++
@@ -3,6 +3,8 @@
import unittest
import doctest
import os
+import os.path as os_path
+import sys
import lupa
@@ -17,7 +19,22 @@
suite = unittest.defaultTestLoader.loadTestsFromNames(tests)
suite.addTest(doctest.DocTestSuite(lupa._lupa))
- suite.addTest(doctest.DocFileSuite('../../README.rst'))
+
+ # Long version of
+ # suite.addTest(doctest.DocFileSuite('../../README.rst'))
+ # to remove some platform specific tests.
+ readme_filename = 'README.rst'
+ readme_file = os_path.join(os_path.dirname(__file__), '..', '..', readme_filename)
+ with open(readme_file) as f:
+ readme = f.read()
+ if sys.platform != 'linux2':
+ # Exclude last section, which is Linux specific.
+ readme = readme.split('Importing Lua binary modules\n----------------------------\n', 1)[0]
+
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(readme, {'__file__': readme_file}, 'README.rst', readme_file, 0)
+ suite.addTest(doctest.DocFileCase(test))
+
return suite
|
8a1002dd7afd008ff85699f95ae11884b96f60ac | nanoservice/crypto.py | nanoservice/crypto.py | import hmac
import hashlib
from .error import AuthenticatorInvalidSignature
class Authenticator(object):
""" This object is used to authenticate messages """
def __init__(self, secret, digestmod=None):
assert secret
self.secret = secret.encode('utf-8')
self.digestmod = digestmod or hashlib.sha256
self.sig_size = self.digestmod().digest_size * 2
def sign(self, encoded):
""" Return authentication signature of encoded bytes """
h = hmac.new(self.secret, encoded, digestmod=self.digestmod)
return h.hexdigest().encode('utf-8')
def signed(self, encoded):
""" Sign encoded bytes and append signature """
signature = self.sign(encoded)
return signature + encoded
def unsigned(self, encoded):
""" Remove signature and return just the message """
_, message = self.split(encoded)
return message
def split(self, encoded):
""" Split into signature and message """
signature = encoded[:self.sig_size]
message = encoded[self.sig_size:]
return signature, message
def auth(self, encoded):
""" Validate integrity of encoded bytes """
signature, message = self.split(encoded)
computed = self.sign(message)
if not hmac.compare_digest(signature, computed):
raise AuthenticatorInvalidSignature
| import hmac
import hashlib
from .error import AuthenticatorInvalidSignature
class Authenticator(object):
""" This object is used to authenticate messages """
def __init__(self, secret, digestmod=None):
assert secret
self.secret = secret.encode('utf-8')
self.digestmod = digestmod or hashlib.sha256
self.sig_size = self.digestmod().digest_size * 2
def sign(self, encoded):
""" Return authentication signature of encoded bytes """
h = hmac.new(self.secret, encoded, digestmod=self.digestmod)
return h.hexdigest().encode('utf-8')
def signed(self, encoded):
""" Sign encoded bytes and append signature """
signature = self.sign(encoded)
return encoded + signature
def unsigned(self, encoded):
""" Remove signature and return just the message """
message, _ = self.split(encoded)
return message
def split(self, encoded):
""" Split into signature and message """
maxlen = len(encoded) - self.sig_size
message = encoded[:maxlen]
signature = encoded[-self.sig_size:]
return message, signature
def auth(self, encoded):
""" Validate integrity of encoded bytes """
message, signature = self.split(encoded)
computed = self.sign(message)
if not hmac.compare_digest(signature, computed):
raise AuthenticatorInvalidSignature
| Put signature at the end to permit pub-sub to happen | Put signature at the end to permit pub-sub to happen
| Python | mit | walkr/nanoservice | ---
+++
@@ -21,22 +21,23 @@
def signed(self, encoded):
""" Sign encoded bytes and append signature """
signature = self.sign(encoded)
- return signature + encoded
+ return encoded + signature
def unsigned(self, encoded):
""" Remove signature and return just the message """
- _, message = self.split(encoded)
+ message, _ = self.split(encoded)
return message
def split(self, encoded):
""" Split into signature and message """
- signature = encoded[:self.sig_size]
- message = encoded[self.sig_size:]
- return signature, message
+ maxlen = len(encoded) - self.sig_size
+ message = encoded[:maxlen]
+ signature = encoded[-self.sig_size:]
+ return message, signature
def auth(self, encoded):
""" Validate integrity of encoded bytes """
- signature, message = self.split(encoded)
+ message, signature = self.split(encoded)
computed = self.sign(message)
if not hmac.compare_digest(signature, computed):
raise AuthenticatorInvalidSignature |
2f0cf83b4f2bcff1f6c7f8caa118f98153ae0c0f | annotator_supreme/config/development.py | annotator_supreme/config/development.py | import logging
ENV = 'development'
APP_DEBUG = False # Avoid using this in debug mode
# CORS configuration
CORS_HEADERS = 'Content-Type'
CORS_ORIGINS = '*'
KEYSPACE = "annotator_supreme3"
#Logging
LOG_LEVEL = logging.INFO
| import logging
ENV = 'development'
APP_DEBUG = True # Avoid using this in debug mode
# CORS configuration
CORS_HEADERS = 'Content-Type'
CORS_ORIGINS = '*'
KEYSPACE = "annotator_supreme3"
#Logging
LOG_LEVEL = logging.INFO
| Enable livereload on dev by default | UPD: Enable livereload on dev by default
| Python | mit | meerkat-cv/annotator-supreme,meerkat-cv/annotator-supreme,meerkat-cv/annotator-supreme,meerkat-cv/annotator-supreme,meerkat-cv/annotator-supreme | ---
+++
@@ -1,7 +1,7 @@
import logging
ENV = 'development'
-APP_DEBUG = False # Avoid using this in debug mode
+APP_DEBUG = True # Avoid using this in debug mode
# CORS configuration
CORS_HEADERS = 'Content-Type' |
b9b03c1f736b38d122baafdd57bbd96657de17af | valuenetwork/api/types/apps.py | valuenetwork/api/types/apps.py | from django.apps import AppConfig
import valuenetwork.api.types as types
class ApiTypesAppConfig(AppConfig):
name = 'valuenetwork.api.types'
verbose_name = "ApiTypes"
def ready(self):
#import pdb; pdb.set_trace()
from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory
types.EconomicResource = EconomicResource
types.EconomicResourceCategory = EconomicResourceCategory
from valuenetwork.api.types.Agent import Agent
types.Agent = Agent
from valuenetwork.api.types.Process import Process
types.Process = Process
from valuenetwork.api.types.EconomicEvent import EconomicEvent
types.EconomicEvent = EconomicEvent
super(ApiTypesAppConfig, self).ready()
| from django.apps import AppConfig
import valuenetwork.api.types as types
class ApiTypesAppConfig(AppConfig):
name = 'valuenetwork.api.types'
verbose_name = "ApiTypes"
def ready(self):
""" Source of this hack:
https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py
'Adding from .models import CommentMixin imports CommentMixin so that you can use it
inside the ready() method. It does not magically add it to the comment module so that
you can access it as comments.CommentMixin
You could assign it to the comments module in the ready() method.'
from .models import CommentMixin
comments.CommentMixin = CommentsMixin
"""
from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory
types.EconomicResource = EconomicResource
types.EconomicResourceCategory = EconomicResourceCategory
from valuenetwork.api.types.Agent import Agent
types.Agent = Agent
from valuenetwork.api.types.Process import Process
types.Process = Process
from valuenetwork.api.types.EconomicEvent import EconomicEvent
types.EconomicEvent = EconomicEvent
super(ApiTypesAppConfig, self).ready()
| Add a comment about the source of the hack | Add a comment about the source of the hack
| Python | agpl-3.0 | FreedomCoop/valuenetwork,FreedomCoop/valuenetwork,FreedomCoop/valuenetwork,FreedomCoop/valuenetwork | ---
+++
@@ -6,7 +6,16 @@
verbose_name = "ApiTypes"
def ready(self):
- #import pdb; pdb.set_trace()
+ """ Source of this hack:
+ https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py
+ 'Adding from .models import CommentMixin imports CommentMixin so that you can use it
+ inside the ready() method. It does not magically add it to the comment module so that
+ you can access it as comments.CommentMixin
+
+ You could assign it to the comments module in the ready() method.'
+ from .models import CommentMixin
+ comments.CommentMixin = CommentsMixin
+ """
from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory
types.EconomicResource = EconomicResource |
5fc72fab36b3c29ccadc64aac3ffcb8d6bf56c48 | osf/models/subject.py | osf/models/subject.py | # -*- coding: utf-8 -*-
from django.db import models
from website.util import api_v2_url
from osf.models.base import BaseModel, ObjectIDMixin
class Subject(ObjectIDMixin, BaseModel):
"""A subject discipline that may be attached to a preprint."""
modm_model_path = 'website.project.taxonomies.Subject'
modm_query = None
text = models.CharField(null=False, max_length=256, unique=True) # max length on prod: 73
parents = models.ManyToManyField('self', symmetrical=False, related_name='children')
@property
def absolute_api_v2_url(self):
return api_v2_url('taxonomies/{}/'.format(self._id))
@property
def child_count(self):
"""For v1 compat."""
return self.children.count()
def get_absolute_url(self):
return self.absolute_api_v2_url
| # -*- coding: utf-8 -*-
from django.db import models
from website.util import api_v2_url
from osf.models.base import BaseModel, ObjectIDMixin
class Subject(ObjectIDMixin, BaseModel):
"""A subject discipline that may be attached to a preprint."""
modm_model_path = 'website.project.taxonomies.Subject'
modm_query = None
text = models.CharField(null=False, max_length=256, unique=True) # max length on prod: 73
parents = models.ManyToManyField('self', symmetrical=False, related_name='children')
@property
def absolute_api_v2_url(self):
return api_v2_url('taxonomies/{}/'.format(self._id))
@property
def child_count(self):
"""For v1 compat."""
return self.children.count()
def get_absolute_url(self):
return self.absolute_api_v2_url
@property
def hierarchy(self):
if self.parents.exists():
return self.parents.first().hierarchy + [self._id]
return [self._id]
| Add Subject.hierarchy to djangosf model | Add Subject.hierarchy to djangosf model
| Python | apache-2.0 | Johnetordoff/osf.io,mluo613/osf.io,acshi/osf.io,hmoco/osf.io,alexschiller/osf.io,adlius/osf.io,leb2dg/osf.io,adlius/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,baylee-d/osf.io,saradbowman/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,acshi/osf.io,erinspace/osf.io,mattclark/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,icereval/osf.io,acshi/osf.io,Johnetordoff/osf.io,chrisseto/osf.io,binoculars/osf.io,acshi/osf.io,crcresearch/osf.io,alexschiller/osf.io,chennan47/osf.io,mluo613/osf.io,laurenrevere/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,adlius/osf.io,hmoco/osf.io,TomBaxter/osf.io,binoculars/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,chennan47/osf.io,adlius/osf.io,crcresearch/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,baylee-d/osf.io,baylee-d/osf.io,hmoco/osf.io,crcresearch/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,hmoco/osf.io,caseyrollins/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,caneruguz/osf.io,mattclark/osf.io,laurenrevere/osf.io,icereval/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,pattisdr/osf.io,chrisseto/osf.io,caneruguz/osf.io,TomBaxter/osf.io,caneruguz/osf.io,felliott/osf.io,sloria/osf.io,sloria/osf.io,mluo613/osf.io,chrisseto/osf.io,cslzchen/osf.io,erinspace/osf.io,cslzchen/osf.io,chrisseto/osf.io,icereval/osf.io,mattclark/osf.io,chennan47/osf.io,erinspace/osf.io,aaxelb/osf.io,cwisecarver/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,aaxelb/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,Nesiehr/osf.io,mluo613/osf.io,acshi/osf.io,felliott/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,laurenrevere/osf.io,fellio
tt/osf.io,saradbowman/osf.io,pattisdr/osf.io,TomBaxter/osf.io,pattisdr/osf.io | ---
+++
@@ -25,3 +25,9 @@
def get_absolute_url(self):
return self.absolute_api_v2_url
+
+ @property
+ def hierarchy(self):
+ if self.parents.exists():
+ return self.parents.first().hierarchy + [self._id]
+ return [self._id] |
8235a217b50520093d549115fe09a8d4ff5e9191 | webmanager/default_settings.py | webmanager/default_settings.py | INSTALLED_APPS += (
'simplemenu',
'webmanager',
'bootstrapform',
'userenabootstrap',
'userena',
# 'social_auth',
'provider.oauth2',
)
TEMPLATE_CONTEXT_PROCESSORS += (
'django.contrib.auth.context_processors.auth',
)
AUTHENTICATION_BACKENDS += (
'userena.backends.UserenaAuthenticationBackend',
'django.contrib.auth.backends.ModelBackend',
'guardian.backends.ObjectPermissionBackend'
)
ANONYMOUS_USER_ID = -1
AUTH_PROFILE_MODULE = 'webmanager.MyProfile'
USERENA_SIGNIN_REDIRECT_URL = '/accounts/%(username)s/'
LOGIN_URL = '/accounts/signin/'
LOGOUT_URL = '/accounts/signout/'
#EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
USERENA_ACTIVATION_REQUIRED = False
USERENA_SIGNIN_AFTER_SIGNUP = True
| INSTALLED_APPS += (
'provider',
'provider.oauth2',
'simplemenu',
'webmanager',
'bootstrapform',
'userenabootstrap',
'userena',
# 'social_auth',
)
TEMPLATE_CONTEXT_PROCESSORS += (
'django.contrib.auth.context_processors.auth',
)
AUTHENTICATION_BACKENDS += (
'userena.backends.UserenaAuthenticationBackend',
'django.contrib.auth.backends.ModelBackend',
'guardian.backends.ObjectPermissionBackend'
)
ANONYMOUS_USER_ID = -1
AUTH_PROFILE_MODULE = 'webmanager.MyProfile'
USERENA_SIGNIN_REDIRECT_URL = '/accounts/%(username)s/'
LOGIN_URL = '/accounts/signin/'
LOGOUT_URL = '/accounts/signout/'
#EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
USERENA_ACTIVATION_REQUIRED = False
USERENA_SIGNIN_AFTER_SIGNUP = True
| Fix provider oauth2 warning by import provider before oauth2 as described in the manual | Fix provider oauth2 warning by import provider before oauth2 as described in the manual
| Python | bsd-3-clause | weijia/webmanager,weijia/webmanager,weijia/webmanager | ---
+++
@@ -1,11 +1,12 @@
INSTALLED_APPS += (
+ 'provider',
+ 'provider.oauth2',
'simplemenu',
'webmanager',
'bootstrapform',
'userenabootstrap',
'userena',
# 'social_auth',
- 'provider.oauth2',
)
TEMPLATE_CONTEXT_PROCESSORS += ( |
deef2f98deeeaf51bd9ddda4c5a200d082e16522 | arcutils/test/base.py | arcutils/test/base.py | import json
import django.test
from .user import UserMixin
class Client(django.test.Client):
def patch_json(self, path, data=None, **kwargs):
return self.patch(path, **self._json_kwargs(data, kwargs))
def post_json(self, path, data=None, **kwargs):
return self.post(path, **self._json_kwargs(data, kwargs))
def put_json(self, path, data=None, **kwargs):
return self.put(path, **self._json_kwargs(data, kwargs))
def _json_kwargs(self, data, kwargs):
if data is not None:
data = json.dumps(data)
kwargs['data'] = data
kwargs['content_type'] = 'application/json'
return kwargs
class FunctionalTestCase(django.test.TestCase, UserMixin):
"""Base class for view tests.
It adds the following to Django's `TestCase`:
- Convenient user creation & login
- Convenient POSTs, PUTs, and PATCHes with a JSON body
"""
client_class = Client
| import json
import django.test
from .user import UserMixin
class Client(django.test.Client):
def patch_json(self, path, data=None, **kwargs):
return self.patch(path, **self._json_kwargs(data, kwargs))
def post_json(self, path, data=None, **kwargs):
return self.post(path, **self._json_kwargs(data, kwargs))
def put_json(self, path, data=None, **kwargs):
return self.put(path, **self._json_kwargs(data, kwargs))
def patch_json(self, path, data=None, **kwargs):
return self.patch(path, **self._json_kwargs(data, kwargs))
def _json_kwargs(self, data, kwargs):
if data is not None:
data = json.dumps(data)
kwargs['data'] = data
kwargs['content_type'] = 'application/json'
return kwargs
class FunctionalTestCase(django.test.TestCase, UserMixin):
"""Base class for view tests.
It adds the following to Django's `TestCase`:
- Convenient user creation & login
- Convenient POSTs, PUTs, and PATCHes with a JSON body
"""
client_class = Client
| Add patch_json method to our test Client | Add patch_json method to our test Client
| Python | mit | wylee/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils,PSU-OIT-ARC/django-arcutils | ---
+++
@@ -15,6 +15,9 @@
def put_json(self, path, data=None, **kwargs):
return self.put(path, **self._json_kwargs(data, kwargs))
+
+ def patch_json(self, path, data=None, **kwargs):
+ return self.patch(path, **self._json_kwargs(data, kwargs))
def _json_kwargs(self, data, kwargs):
if data is not None: |
fa4cf8c6c7a89a60c0067be53c6b0f29d3d3cde3 | biosensorsdb/admin.py | biosensorsdb/admin.py | import biosensorsdb.models
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
search_fields = ['team__name', 'year', 'title', 'abstract', 'inputs__name', 'outputs__name', 'tags__name']
admin.site.register(biosensorsdb.models.Team)
admin.site.register(biosensorsdb.models.SensorInput)
admin.site.register(biosensorsdb.models.SensorOutput)
admin.site.register(biosensorsdb.models.Track)
admin.site.register(biosensorsdb.models.Application)
admin.site.register(biosensorsdb.models.CompetitionResult)
admin.site.register(biosensorsdb.models.Project, ProjectAdmin)
| import biosensorsdb.models
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
search_fields = [
'team__name',
'year',
'title',
'abstract',
'track__name',
'inputs__name',
'outputs__name',
'application__name',
'results__result',
'tags__name',
]
admin.site.register(biosensorsdb.models.Team)
admin.site.register(biosensorsdb.models.SensorInput)
admin.site.register(biosensorsdb.models.SensorOutput)
admin.site.register(biosensorsdb.models.Track)
admin.site.register(biosensorsdb.models.Application)
admin.site.register(biosensorsdb.models.CompetitionResult)
admin.site.register(biosensorsdb.models.Project, ProjectAdmin)
| Make all project fields searchable. | Make all project fields searchable.
| Python | mit | jwintersinger/igembiosensors,jwintersinger/igembiosensors | ---
+++
@@ -2,7 +2,18 @@
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
- search_fields = ['team__name', 'year', 'title', 'abstract', 'inputs__name', 'outputs__name', 'tags__name']
+ search_fields = [
+ 'team__name',
+ 'year',
+ 'title',
+ 'abstract',
+ 'track__name',
+ 'inputs__name',
+ 'outputs__name',
+ 'application__name',
+ 'results__result',
+ 'tags__name',
+ ]
admin.site.register(biosensorsdb.models.Team)
admin.site.register(biosensorsdb.models.SensorInput) |
fd1b25ee02e878a76d12a5e3b679b0e5a11e7509 | src/choropleth/settings/test.py | src/choropleth/settings/test.py | from .base import *
DEBUG = False
TEMPLATE_DEBUG = False
INSTALLED_APPS += (
'model_mommy',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'choropleth',
}
}
| from .base import *
DEBUG = True
TEMPLATE_DEBUG = True
INSTALLED_APPS += (
'model_mommy',
)
STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'choropleth',
}
}
| Set debug settings to True for the sake of Pipeline. | Set debug settings to True for the sake of Pipeline.
git-svn-id: d73fdb991549f9d1a0affa567d55bb0fdbd453f3@8052 f04a3889-0f81-4131-97fb-bc517d1f583d
| Python | bsd-3-clause | damonkelley/texas-choropleth,unt-libraries/texas-choropleth,unt-libraries/texas-choropleth,damonkelley/texas-choropleth,unt-libraries/texas-choropleth,unt-libraries/texas-choropleth,damonkelley/texas-choropleth,damonkelley/texas-choropleth | ---
+++
@@ -1,11 +1,13 @@
from .base import *
-DEBUG = False
-TEMPLATE_DEBUG = False
+DEBUG = True
+TEMPLATE_DEBUG = True
INSTALLED_APPS += (
'model_mommy',
)
+
+STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'
DATABASES = {
'default': { |
d6a205e7c97c3355fbf0678c5a439d14d834bb45 | kokki/cookbooks/munin/recipes/master.py | kokki/cookbooks/munin/recipes/master.py |
from kokki import *
Package("munin")
File("/etc/munin/munin.conf",
owner = "root",
group = "root",
mode = 0644,
content = Template("munin/munin.conf.j2"))
|
from kokki import *
Package("munin")
Directory(env.config.munin.dbdir,
owner = "munin",
group = "munin",
mode = 0755)
File("/etc/munin/munin.conf",
owner = "root",
group = "root",
mode = 0644,
content = Template("munin/munin.conf.j2"))
| Make sure the munin dir exists and is owned by the right user | Make sure the munin dir exists and is owned by the right user
| Python | bsd-3-clause | samuel/kokki | ---
+++
@@ -2,6 +2,11 @@
from kokki import *
Package("munin")
+
+Directory(env.config.munin.dbdir,
+ owner = "munin",
+ group = "munin",
+ mode = 0755)
File("/etc/munin/munin.conf",
owner = "root", |
a016b2a4158a3412d3db14d9180994b7c49041ab | preferences/views.py | preferences/views.py | from django.shortcuts import render
from django.db import transaction
# from django.views.generic import TemplateView
from tot.utils import get_current_people
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.models import PersonFollow
from opencivicdata.models.people_orgs import Person
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
def user_preferences(request):
user = request.user
senators = get_current_people(position='senator')
representatives = get_current_people(position='representative')
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
for senator in request.POST.getlist('senators'):
PersonFollow.objects.create(user=user, person_id=senator)
for representative in request.POST.getlist('representatives'):
PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
{'user': user, 'senators': senators, 'representatives': representatives}
) | from django.shortcuts import render
from django.db import transaction
# from django.views.generic import TemplateView
from tot.utils import get_current_people
from bills.utils import get_all_subjects, get_all_locations
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.models import PersonFollow
from opencivicdata.models.people_orgs import Person
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
def user_preferences(request):
user = request.user
senators = get_current_people(position='senator')
representatives = get_current_people(position='representative')
locations = get_all_locations()
subjects = get_all_subjects()
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
for senator in request.POST.getlist('senators'):
PersonFollow.objects.create(user=user, person_id=senator)
for representative in request.POST.getlist('representatives'):
PersonFollow.objects.create(user=user, person_id=representitive)
for location in request.POST.getlist('locations'):
PersonFollow.objects.create(user=user, person_id=representitive)
for subject in request.POST.getlist('subjects'):
PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
{
'user': user,
'senators': senators,
'representatives': representatives,
'locations': locations,
'subjects': subjects
}
) | Update view to grab subjects and locations | Update view to grab subjects and locations
| Python | mit | jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot | ---
+++
@@ -3,6 +3,8 @@
# from django.views.generic import TemplateView
from tot.utils import get_current_people
+
+from bills.utils import get_all_subjects, get_all_locations
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
@@ -24,6 +26,9 @@
senators = get_current_people(position='senator')
representatives = get_current_people(position='representative')
+ locations = get_all_locations()
+ subjects = get_all_subjects()
+
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
@@ -31,9 +36,19 @@
PersonFollow.objects.create(user=user, person_id=senator)
for representative in request.POST.getlist('representatives'):
PersonFollow.objects.create(user=user, person_id=representitive)
+ for location in request.POST.getlist('locations'):
+ PersonFollow.objects.create(user=user, person_id=representitive)
+ for subject in request.POST.getlist('subjects'):
+ PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
- {'user': user, 'senators': senators, 'representatives': representatives}
+ {
+ 'user': user,
+ 'senators': senators,
+ 'representatives': representatives,
+ 'locations': locations,
+ 'subjects': subjects
+ }
) |
d47da024519f3e6d61ac05c8379c617a3e334706 | configs/test/SysPaths.py | configs/test/SysPaths.py | from m5 import *
import os.path
import sys
# Edit the following list to include the possible paths to the binary
# and disk image directories. The first directory on the list that
# exists will be selected.
SYSTEMDIR_PATH = ['/n/poolfs/z/dist/m5/system']
SYSTEMDIR = None
for d in SYSTEMDIR_PATH:
if os.path.exists(d):
SYSTEMDIR = d
break
if not SYSTEMDIR:
print >>sys.stderr, "Can't find a path to system files."
sys.exit(1)
BINDIR = SYSTEMDIR + '/binaries'
DISKDIR = SYSTEMDIR + '/disks'
def disk(file):
return os.path.join(DISKDIR, file)
def binary(file):
return os.path.join(BINDIR, file)
def script(file):
return os.path.join(SYSTEMDIR, 'boot', file)
| import os, sys
from os.path import isdir, join as joinpath
from os import environ as env
systemdir = None
bindir = None
diskdir = None
scriptdir = None
def load_defaults():
global systemdir, bindir, diskdir, scriptdir
if not systemdir:
try:
path = env['M5_PATH'].split(':')
except KeyError:
path = [ '/dist/m5/system' ]
for systemdir in path:
if os.path.isdir(systemdir):
break
else:
raise ImportError, "Can't find a path to system files."
if not bindir:
bindir = joinpath(systemdir, 'binaries')
if not diskdir:
diskdir = joinpath(systemdir, 'disks')
if not scriptdir:
scriptdir = joinpath(systemdir, 'boot')
def disk(file):
load_defaults()
return joinpath(diskdir, file)
def binary(file):
load_defaults()
return joinpath(bindir, file)
def script(file):
load_defaults()
return joinpath(scriptdir, file)
| Make the system paths more configurable | Make the system paths more configurable
configs/test/SysPaths.py:
Make the paths more configurable
| Python | bsd-3-clause | LingxiaoJIA/gem5,LingxiaoJIA/gem5,haowu4682/gem5,andrewfu0325/gem5-aladdin,haowu4682/gem5,andrewfu0325/gem5-aladdin,haowu4682/gem5,LingxiaoJIA/gem5,andrewfu0325/gem5-aladdin,LingxiaoJIA/gem5,andrewfu0325/gem5-aladdin,LingxiaoJIA/gem5,andrewfu0325/gem5-aladdin,haowu4682/gem5,haowu4682/gem5,andrewfu0325/gem5-aladdin,LingxiaoJIA/gem5,haowu4682/gem5,haowu4682/gem5,haowu4682/gem5,andrewfu0325/gem5-aladdin,LingxiaoJIA/gem5,haowu4682/gem5 | ---
+++
@@ -1,32 +1,42 @@
-from m5 import *
+import os, sys
+from os.path import isdir, join as joinpath
+from os import environ as env
-import os.path
-import sys
+systemdir = None
+bindir = None
+diskdir = None
+scriptdir = None
-# Edit the following list to include the possible paths to the binary
-# and disk image directories. The first directory on the list that
-# exists will be selected.
-SYSTEMDIR_PATH = ['/n/poolfs/z/dist/m5/system']
+def load_defaults():
+ global systemdir, bindir, diskdir, scriptdir
+ if not systemdir:
+ try:
+ path = env['M5_PATH'].split(':')
+ except KeyError:
+ path = [ '/dist/m5/system' ]
-SYSTEMDIR = None
-for d in SYSTEMDIR_PATH:
- if os.path.exists(d):
- SYSTEMDIR = d
- break
+ for systemdir in path:
+ if os.path.isdir(systemdir):
+ break
+ else:
+ raise ImportError, "Can't find a path to system files."
-if not SYSTEMDIR:
- print >>sys.stderr, "Can't find a path to system files."
- sys.exit(1)
-
-BINDIR = SYSTEMDIR + '/binaries'
-DISKDIR = SYSTEMDIR + '/disks'
+ if not bindir:
+ bindir = joinpath(systemdir, 'binaries')
+ if not diskdir:
+ diskdir = joinpath(systemdir, 'disks')
+ if not scriptdir:
+ scriptdir = joinpath(systemdir, 'boot')
def disk(file):
- return os.path.join(DISKDIR, file)
+ load_defaults()
+ return joinpath(diskdir, file)
def binary(file):
- return os.path.join(BINDIR, file)
+ load_defaults()
+ return joinpath(bindir, file)
def script(file):
- return os.path.join(SYSTEMDIR, 'boot', file)
+ load_defaults()
+ return joinpath(scriptdir, file)
|
5864ee86593a71aac1cca17bc351dcbcfebf5337 | bottery/exceptions.py | bottery/exceptions.py | """Exception classes for Bottery"""
class ImproperlyConfigured(Exception):
"""Bottery is somehow improperly configured"""
pass
| """Exception classes for Bottery"""
class BotteryException(Exception):
"""A base class for all Bottery exceptions for easier catching."""
pass
class ImproperlyConfigured(BotteryException):
"""Bottery is somehow improperly configured"""
pass
| Add BotteryException base class from which ImproperlyConfigured derives. | Add BotteryException base class from which ImproperlyConfigured derives.
Fixes #40
| Python | mit | rougeth/bottery | ---
+++
@@ -1,6 +1,11 @@
"""Exception classes for Bottery"""
-class ImproperlyConfigured(Exception):
+class BotteryException(Exception):
+ """A base class for all Bottery exceptions for easier catching."""
+ pass
+
+
+class ImproperlyConfigured(BotteryException):
"""Bottery is somehow improperly configured"""
pass |
3fbb7fd06dc0d459c8780abe82683ceb6f65ba6d | Orange/tests/test_preprocess_cur.py | Orange/tests/test_preprocess_cur.py | import unittest
from Orange.data import Table
from Orange.preprocess import ProjectCUR
class TestCURProjector(unittest.TestCase):
def test_project_cur_default(self):
data = Table("ionosphere")
projector = ProjectCUR()
data_cur = projector(data)
for i in xrange(data_cur.X.shape[1]):
sbtr = (data.X - data_cur.X[:, i][:, None]) == 0
self.assertTrue(((sbtr.sum(0) == data.X.shape[0])).any())
self.assertTrue(data_cur.X.shape[1] <= data.X.shape[1])
self.assertTrue((data.metas == data_cur.metas).all())
self.assertTrue((data.Y == data_cur.Y).any())
def test_project_cur(self):
data = Table("ionosphere")
projector = ProjectCUR(rank=3, max_error=1)
data_cur = projector(data)
for i in xrange(data_cur.X.shape[1]):
sbtr = (data.X - data_cur.X[:, i][:, None]) == 0
self.assertTrue(((sbtr.sum(0) == data.X.shape[0])).any())
self.assertTrue(data_cur.X.shape[1] <= data.X.shape[1])
self.assertTrue((data.metas == data_cur.metas).all())
self.assertTrue((data.Y == data_cur.Y).any())
| import unittest
from Orange.data import Table
from Orange.preprocess import ProjectCUR
class TestCURProjector(unittest.TestCase):
def test_project_cur_default(self):
data = Table("ionosphere")
projector = ProjectCUR()
data_cur = projector(data)
for i in range(data_cur.X.shape[1]):
sbtr = (data.X - data_cur.X[:, i][:, None]) == 0
self.assertTrue(((sbtr.sum(0) == data.X.shape[0])).any())
self.assertTrue(data_cur.X.shape[1] <= data.X.shape[1])
self.assertTrue((data.metas == data_cur.metas).all())
self.assertTrue((data.Y == data_cur.Y).any())
def test_project_cur(self):
data = Table("ionosphere")
projector = ProjectCUR(rank=3, max_error=1)
data_cur = projector(data)
for i in range(data_cur.X.shape[1]):
sbtr = (data.X - data_cur.X[:, i][:, None]) == 0
self.assertTrue(((sbtr.sum(0) == data.X.shape[0])).any())
self.assertTrue(data_cur.X.shape[1] <= data.X.shape[1])
self.assertTrue((data.metas == data_cur.metas).all())
self.assertTrue((data.Y == data_cur.Y).any())
| Fix error in test_preprocessor_cur (xrange -> range) | tests: Fix error in test_preprocessor_cur (xrange -> range)
| Python | bsd-2-clause | cheral/orange3,cheral/orange3,cheral/orange3,qPCR4vir/orange3,cheral/orange3,cheral/orange3,cheral/orange3,qPCR4vir/orange3,qPCR4vir/orange3,qPCR4vir/orange3,qPCR4vir/orange3,qPCR4vir/orange3 | ---
+++
@@ -9,7 +9,7 @@
data = Table("ionosphere")
projector = ProjectCUR()
data_cur = projector(data)
- for i in xrange(data_cur.X.shape[1]):
+ for i in range(data_cur.X.shape[1]):
sbtr = (data.X - data_cur.X[:, i][:, None]) == 0
self.assertTrue(((sbtr.sum(0) == data.X.shape[0])).any())
self.assertTrue(data_cur.X.shape[1] <= data.X.shape[1])
@@ -20,7 +20,7 @@
data = Table("ionosphere")
projector = ProjectCUR(rank=3, max_error=1)
data_cur = projector(data)
- for i in xrange(data_cur.X.shape[1]):
+ for i in range(data_cur.X.shape[1]):
sbtr = (data.X - data_cur.X[:, i][:, None]) == 0
self.assertTrue(((sbtr.sum(0) == data.X.shape[0])).any())
self.assertTrue(data_cur.X.shape[1] <= data.X.shape[1]) |
3ed19b2672738a59fc8676e0403ee90fe57273a1 | django/website/logframe/tests/test_admin.py | django/website/logframe/tests/test_admin.py | from mock import Mock
from ..admin import SubIndicatorAdmin
from ..models import SubIndicator
def test_sub_indicator_admin_rsult_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
| from mock import Mock
from ..admin import RatingAdmin, SubIndicatorAdmin
from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_rsult_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
def test_rating_admin_colored_name_returns_name_for_colours():
obj = Mock(color=colors[0][0])
admin = RatingAdmin(Rating, None)
assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj)
| Add test for RatingAdmin colored_name | Add test for RatingAdmin colored_name | Python | agpl-3.0 | aptivate/alfie,daniell/kashana,aptivate/kashana,aptivate/alfie,daniell/kashana,aptivate/kashana,aptivate/alfie,daniell/kashana,aptivate/alfie,aptivate/kashana,aptivate/kashana,daniell/kashana | ---
+++
@@ -1,7 +1,7 @@
from mock import Mock
-from ..admin import SubIndicatorAdmin
-from ..models import SubIndicator
+from ..admin import RatingAdmin, SubIndicatorAdmin
+from ..models import colors, Rating, SubIndicator
def test_sub_indicator_admin_rsult_returns_indicator_result():
@@ -9,3 +9,10 @@
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
+
+
+def test_rating_admin_colored_name_returns_name_for_colours():
+ obj = Mock(color=colors[0][0])
+
+ admin = RatingAdmin(Rating, None)
+ assert '<span class="rating-list-item {0}">{1}</span>'.format(colors[0][0], colors[0][1]) == admin.colored_name(obj) |
4cfb03f7d36822d658b2066d0b1c51d78f213458 | examples/django_example/example/app/mail.py | examples/django_example/example/app/mail.py | from django.conf import settings
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
def send_validation(strategy, code):
url = reverse('social:complete', args=(strategy.backend_name,)) + \
'?verification_code=' + code.code
send_mail('Validate your account',
'Validate your account {0}'.format(url),
settings.EMAIL_FROM,
[code.email],
fail_silently=False)
| from django.conf import settings
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
def send_validation(strategy, code):
url = reverse('social:complete', args=(strategy.backend.name,)) + \
'?verification_code=' + code.code
send_mail('Validate your account',
'Validate your account {0}'.format(url),
settings.EMAIL_FROM,
[code.email],
fail_silently=False)
| Use strategy.backend.name instead of strategy.backend_name | Use strategy.backend.name instead of strategy.backend_name | Python | bsd-3-clause | mrwags/python-social-auth,ononeor12/python-social-auth,mark-adams/python-social-auth,python-social-auth/social-docs,tutumcloud/python-social-auth,SeanHayes/python-social-auth,joelstanner/python-social-auth,ariestiyansyah/python-social-auth,DhiaEddineSaidi/python-social-auth,cjltsod/python-social-auth,ByteInternet/python-social-auth,contracode/python-social-auth,tkajtoch/python-social-auth,mathspace/python-social-auth,fearlessspider/python-social-auth,mark-adams/python-social-auth,DhiaEddineSaidi/python-social-auth,python-social-auth/social-app-django,merutak/python-social-auth,lawrence34/python-social-auth,msampathkumar/python-social-auth,jameslittle/python-social-auth,muhammad-ammar/python-social-auth,iruga090/python-social-auth,chandolia/python-social-auth,ariestiyansyah/python-social-auth,JerzySpendel/python-social-auth,JJediny/python-social-auth,hsr-ba-fs15-dat/python-social-auth,drxos/python-social-auth,jeyraof/python-social-auth,drxos/python-social-auth,contracode/python-social-auth,duoduo369/python-social-auth,contracode/python-social-auth,jeyraof/python-social-auth,robbiet480/python-social-auth,msampathkumar/python-social-auth,cmichal/python-social-auth,rsteca/python-social-auth,merutak/python-social-auth,henocdz/python-social-auth,robbiet480/python-social-auth,mchdks/python-social-auth,michael-borisov/python-social-auth,lneoe/python-social-auth,jeyraof/python-social-auth,nirmalvp/python-social-auth,iruga090/python-social-auth,python-social-auth/social-storage-sqlalchemy,mrwags/python-social-auth,tobias47n9e/social-core,nirmalvp/python-social-auth,mrwags/python-social-auth,alrusdi/python-social-auth,msampathkumar/python-social-auth,bjorand/python-social-auth,cjltsod/python-social-auth,hsr-ba-fs15-dat/python-social-auth,noodle-learns-programming/python-social-auth,lamby/python-social-auth,ononeor12/python-social-a
uth,JerzySpendel/python-social-auth,jneves/python-social-auth,VishvajitP/python-social-auth,ononeor12/python-social-auth,DhiaEddineSaidi/python-social-auth,falcon1kr/python-social-auth,VishvajitP/python-social-auth,S01780/python-social-auth,firstjob/python-social-auth,ariestiyansyah/python-social-auth,nirmalvp/python-social-auth,rsteca/python-social-auth,rsteca/python-social-auth,iruga090/python-social-auth,joelstanner/python-social-auth,rsalmaso/python-social-auth,garrett-schlesinger/python-social-auth,jneves/python-social-auth,MSOpenTech/python-social-auth,Andygmb/python-social-auth,webjunkie/python-social-auth,SeanHayes/python-social-auth,python-social-auth/social-app-django,frankier/python-social-auth,mchdks/python-social-auth,alrusdi/python-social-auth,noodle-learns-programming/python-social-auth,mathspace/python-social-auth,degs098/python-social-auth,ByteInternet/python-social-auth,jameslittle/python-social-auth,chandolia/python-social-auth,garrett-schlesinger/python-social-auth,JerzySpendel/python-social-auth,merutak/python-social-auth,henocdz/python-social-auth,lamby/python-social-auth,S01780/python-social-auth,barseghyanartur/python-social-auth,JJediny/python-social-auth,henocdz/python-social-auth,lawrence34/python-social-auth,rsalmaso/python-social-auth,imsparsh/python-social-auth,firstjob/python-social-auth,yprez/python-social-auth,lawrence34/python-social-auth,joelstanner/python-social-auth,JJediny/python-social-auth,tkajtoch/python-social-auth,wildtetris/python-social-auth,frankier/python-social-auth,Andygmb/python-social-auth,lneoe/python-social-auth,python-social-auth/social-app-django,webjunkie/python-social-auth,imsparsh/python-social-auth,lneoe/python-social-auth,ByteInternet/python-social-auth,MSOpenTech/python-social-auth,python-social-auth/social-core,cmichal/python-social-auth,drxos/python-social-auth,bjorand/python-social-auth,mark-adams/python-social-auth,wildtetris/python-social-auth,daniula/python-social-auth,degs098/python-social-auth,fear
lessspider/python-social-auth,san-mate/python-social-auth,S01780/python-social-auth,bjorand/python-social-auth,san-mate/python-social-auth,michael-borisov/python-social-auth,webjunkie/python-social-auth,michael-borisov/python-social-auth,muhammad-ammar/python-social-auth,degs098/python-social-auth,daniula/python-social-auth,clef/python-social-auth,jameslittle/python-social-auth,barseghyanartur/python-social-auth,noodle-learns-programming/python-social-auth,daniula/python-social-auth,lamby/python-social-auth,alrusdi/python-social-auth,san-mate/python-social-auth,mchdks/python-social-auth,cmichal/python-social-auth,python-social-auth/social-app-cherrypy,hsr-ba-fs15-dat/python-social-auth,firstjob/python-social-auth,falcon1kr/python-social-auth,VishvajitP/python-social-auth,Andygmb/python-social-auth,mathspace/python-social-auth,clef/python-social-auth,python-social-auth/social-core,jneves/python-social-auth,yprez/python-social-auth,robbiet480/python-social-auth,chandolia/python-social-auth,muhammad-ammar/python-social-auth,MSOpenTech/python-social-auth,wildtetris/python-social-auth,imsparsh/python-social-auth,falcon1kr/python-social-auth,fearlessspider/python-social-auth,yprez/python-social-auth,clef/python-social-auth,tutumcloud/python-social-auth,tkajtoch/python-social-auth,barseghyanartur/python-social-auth,duoduo369/python-social-auth | ---
+++
@@ -4,7 +4,7 @@
def send_validation(strategy, code):
- url = reverse('social:complete', args=(strategy.backend_name,)) + \
+ url = reverse('social:complete', args=(strategy.backend.name,)) + \
'?verification_code=' + code.code
send_mail('Validate your account',
'Validate your account {0}'.format(url), |
e4c79021e21c050e7dcd769f80e2457d97e67506 | future/builtins/backports/newopen.py | future/builtins/backports/newopen.py | class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
self.f = orig_open(fname, mode)
self.enc = encoding
def write(self, s):
return self.f.write(s.encode(self.enc))
def read(self, size=-1):
return self.f.read(size).decode(self.enc)
def close(self):
return self.f.close()
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
self.f.close()
| """
A substitute for the Python 3 open() function.
Note that io.open() is more complete but maybe slower. Even so, the
completeness may be a better default. TODO: compare these
"""
open_ = open
class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
self.f = open_(fname, mode)
self.enc = encoding
def write(self, s):
return self.f.write(s.encode(self.enc))
def read(self, size=-1):
return self.f.read(size).decode(self.enc)
def close(self):
return self.f.close()
def __enter__(self):
return self
def __exit__(self, etype, value, traceback):
self.f.close()
| Update new open() function docs (and fix bug) | Update new open() function docs (and fix bug)
| Python | mit | michaelpacer/python-future,PythonCharmers/python-future,krischer/python-future,PythonCharmers/python-future,krischer/python-future,QuLogic/python-future,QuLogic/python-future,michaelpacer/python-future | ---
+++
@@ -1,10 +1,19 @@
+"""
+A substitute for the Python 3 open() function.
+
+Note that io.open() is more complete but maybe slower. Even so, the
+completeness may be a better default. TODO: compare these
+"""
+
+open_ = open
+
class open(object):
"""Wrapper providing key part of Python 3 open() interface.
From IPython's py3compat.py module. License: BSD.
"""
def __init__(self, fname, mode="r", encoding="utf-8"):
- self.f = orig_open(fname, mode)
+ self.f = open_(fname, mode)
self.enc = encoding
def write(self, s): |
8f13e1f3705f7cbe6b5e797a78c70292aabaf5f8 | neuroshare/NeuralEntity.py | neuroshare/NeuralEntity.py | from Entity import *
class NeuralEntity(Entity):
def __init__(self, nsfile, eid, info):
super(NeuralEntity,self).__init__(eid, nsfile, info)
@property
def probe_info(self):
return self._info['ProbeInfo']
@property
def source_entity_id(self):
return self._info['SourceEntityID']
@property
def source_unit_id(self):
return self._info['SourceUnitID']
def get_data (self, index=0, count=-1):
lib = self.file.library
if count < 0:
count = self.item_count
data = lib._get_neural_data (self, index, count)
return data
| from Entity import *
class NeuralEntity(Entity):
"""Entity the represents timestamps of action potentials, i.e. spike times.
Cutouts of the waveforms corresponding to spike data in a neural entity
might be found in a separate :class:`SegmentEntity` (cf. :func:`source_entity_id`).
"""
def __init__(self, nsfile, eid, info):
super(NeuralEntity,self).__init__(eid, nsfile, info)
@property
def probe_info(self):
return self._info['ProbeInfo']
@property
def source_entity_id(self):
"""[**Optional**] Id of the source entity of this spike, if any.
For example the spike waveform of the action potential corresponding
to this spike might have been recoreded in a segment entity."""
return self._info['SourceEntityID']
@property
def source_unit_id(self):
return self._info['SourceUnitID']
def get_data (self, index=0, count=-1):
"""Retrieve the spike times associated with this entity. A subset
of the data can be requested via the ``inde`` and ``count``
parameters."""
lib = self.file.library
if count < 0:
count = self.item_count
data = lib._get_neural_data (self, index, count)
return data
| Add basic doc to Neural entities | doc: Add basic doc to Neural entities
| Python | lgpl-2.1 | abhay447/python-neuroshare,G-Node/python-neuroshare,G-Node/python-neuroshare,abhay447/python-neuroshare | ---
+++
@@ -1,6 +1,10 @@
from Entity import *
class NeuralEntity(Entity):
+ """Entity the represents timestamps of action potentials, i.e. spike times.
+ Cutouts of the waveforms corresponding to spike data in a neural entity
+ might be found in a separate :class:`SegmentEntity` (cf. :func:`source_entity_id`).
+ """
def __init__(self, nsfile, eid, info):
super(NeuralEntity,self).__init__(eid, nsfile, info)
@@ -10,6 +14,9 @@
@property
def source_entity_id(self):
+ """[**Optional**] Id of the source entity of this spike, if any.
+ For example the spike waveform of the action potential corresponding
+ to this spike might have been recoreded in a segment entity."""
return self._info['SourceEntityID']
@property
@@ -17,6 +24,9 @@
return self._info['SourceUnitID']
def get_data (self, index=0, count=-1):
+ """Retrieve the spike times associated with this entity. A subset
+ of the data can be requested via the ``inde`` and ``count``
+ parameters."""
lib = self.file.library
if count < 0:
count = self.item_count |
ae7bda13946c2fff5760cdefc3c06cd932dcd070 | crits/config/handlers.py | crits/config/handlers.py | from django.conf import settings
from crits.config.config import CRITsConfig
def modify_configuration(forms, analyst):
"""
Modify the configuration with the submitted changes.
:param config_form: The form data.
:type config_form: dict
:param analyst: The user making the modifications.
:type analyst: str
:returns: dict with key "message"
"""
config = CRITsConfig.objects().first()
if not config:
config = CRITsConfig()
data = None
for form in forms:
if not data:
data = form.cleaned_data
else:
data.update(form.cleaned_data)
# data = config_form.cleaned_data
allowed_hosts_list = data['allowed_hosts'].split(',')
allowed_hosts = ()
for allowed_host in allowed_hosts_list:
allowed_hosts = allowed_hosts + (allowed_host.strip(),)
data['allowed_hosts'] = allowed_hosts
service_dirs_list = data['service_dirs'].split(',')
service_dirs = ()
for service_dir in service_dirs_list:
service_dirs = service_dirs + (service_dir.strip(),)
data['service_dirs'] = service_dirs
config.merge(data, overwrite=True)
try:
config.save(username=analyst)
settings.ENABLE_TOASTS = data['enable_toasts']
return {'message': "Success!"}
except Exception, e:
return {'message': "Failure: %s" % e}
| from django.conf import settings
from crits.config.config import CRITsConfig
def modify_configuration(forms, analyst):
"""
Modify the configuration with the submitted changes.
:param config_form: The form data.
:type config_form: dict
:param analyst: The user making the modifications.
:type analyst: str
:returns: dict with key "message"
"""
config = CRITsConfig.objects().first()
if not config:
config = CRITsConfig()
data = None
for form in forms:
if not data:
data = form.cleaned_data
else:
data.update(form.cleaned_data)
# data = config_form.cleaned_data
allowed_hosts_list = data['allowed_hosts'].split(',')
allowed_hosts = ()
for allowed_host in allowed_hosts_list:
allowed_hosts = allowed_hosts + (allowed_host.strip(),)
data['allowed_hosts'] = allowed_hosts
service_dirs_list = data['service_dirs'].split(',')
service_dirs = ()
for service_dir in service_dirs_list:
service_dirs = service_dirs + (service_dir.strip(),)
data['service_dirs'] = service_dirs
config.merge(data, overwrite=True)
try:
config.save(username=analyst)
return {'message': "Success!"}
except Exception, e:
return {'message': "Failure: %s" % e}
| Remove code prototype code where the settings variables were being modified, this code works in single threaded servers but might not work on multithreaded servers. | Remove code prototype code where the settings variables were being modified, this code works in single threaded servers but might not work on multithreaded servers.
| Python | mit | cfossace/crits,jhuapl-marti/marti,jhuapl-marti/marti,Magicked/crits,jinverar/crits,cdorer/crits,davidhdz/crits,DukeOfHazard/crits,blaquee/crits,DukeOfHazard/crits,Magicked/crits,jinverar/crits,seanthegeek/crits,Lambdanaut/crits,davidhdz/crits,dreardon/crits,kaoscoach/crits,cdorer/crits,davidhdz/crits,ckane/crits,ckane/crits,cfossace/crits,DukeOfHazard/crits,blaquee/crits,cdorer/crits,Lambdanaut/crits,DukeOfHazard/crits,jinverar/crits,lakiw/cripts,korrosivesec/crits,Magicked/crits,korrosivesec/crits,0x3a/crits,Lambdanaut/crits,Lambdanaut/crits,dreardon/crits,kaoscoach/crits,dreardon/crits,jhuapl-marti/marti,jinverar/crits,seanthegeek/crits,lakiw/cripts,0x3a/crits,0x3a/crits,seanthegeek/crits,davidhdz/crits,HardlyHaki/crits,kaoscoach/crits,0x3a/crits,HardlyHaki/crits,jhuapl-marti/marti,HardlyHaki/crits,davidhdz/crits,cdorer/crits,blaquee/crits,ckane/crits,dreardon/crits,kaoscoach/crits,HardlyHaki/crits,cfossace/crits,lakiw/cripts,blaquee/crits,korrosivesec/crits,korrosivesec/crits,cfossace/crits,Magicked/crits,seanthegeek/crits,lakiw/cripts,ckane/crits,seanthegeek/crits | ---
+++
@@ -38,8 +38,6 @@
config.merge(data, overwrite=True)
try:
config.save(username=analyst)
-
- settings.ENABLE_TOASTS = data['enable_toasts']
return {'message': "Success!"}
except Exception, e:
return {'message': "Failure: %s" % e} |
daa686e350e6cbdc3bdb51a869e37f0a72df45dd | profile_collection/startup/50-scans.py | profile_collection/startup/50-scans.py | # vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
d2scan = hxntools.scans.d2scan
a2scan = hxntools.scans.a2scan
gs.DETS = [zebra, sclr1, merlin1, xspress3, lakeshore2]
gs.TABLE_COLS = ['sclr1_ch2','sclr1_ch3', 'sclr1_ch4', 'sclr1_ch5_calc', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_Pt'
gs.OVERPLOT = False
gs.BASELINE_DEVICES = [smll,vmll, hmll]
| # vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
d2scan = hxntools.scans.d2scan
a2scan = hxntools.scans.a2scan
gs.DETS = [zebra, sclr1, merlin1, xspress3, lakeshore2]
gs.TABLE_COLS = ['sclr1_ch2','sclr1_ch3', 'sclr1_ch4', 'sclr1_ch5_calc', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_Cr'
gs.OVERPLOT = False
gs.BASELINE_DEVICES = [smll,vmll, hmll, ssa2, zp]
| Add ssa2, zp to baseline | Add ssa2, zp to baseline
| Python | bsd-2-clause | NSLS-II-HXN/ipython_ophyd,NSLS-II-HXN/ipython_ophyd | ---
+++
@@ -23,6 +23,6 @@
# Plot this by default versus motor position:
-gs.PLOT_Y = 'Det2_Pt'
+gs.PLOT_Y = 'Det2_Cr'
gs.OVERPLOT = False
-gs.BASELINE_DEVICES = [smll,vmll, hmll]
+gs.BASELINE_DEVICES = [smll,vmll, hmll, ssa2, zp] |
041efae3a2219b0c28a38ef248647f63d1f82517 | clowder_test/setup.py | clowder_test/setup.py | """
Setup file for clowder test runner
"""
from setuptools import setup
# Written according to the docs at
# https://packaging.python.org/en/latest/distributing.html
setup(
name='clowder-test',
description='Test runner for clowder command',
version='0.1.0',
url='http://clowder.cat',
author='Joe DeCapo',
author_email='joe@polka.cat',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6'
],
packages=['clowder_test',
'clowder_test.cli'],
entry_points={
'console_scripts': [
'clowder-test=clowder_test.clowder_test_app:main',
]
},
install_requires=['argcomplete', 'cement', 'cprint', 'psutil', 'termcolor']
)
| """
Setup file for clowder test runner
"""
from setuptools import setup
# Written according to the docs at
# https://packaging.python.org/en/latest/distributing.html
setup(
name='clowder-test',
description='Test runner for clowder command',
version='0.1.0',
url='http://clowder.cat',
author='Joe DeCapo',
author_email='joe@polka.cat',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6'
],
packages=['clowder_test',
'clowder_test.cli'],
entry_points={
'console_scripts': [
'clowder-test=clowder_test.clowder_test_app:main',
]
},
install_requires=['argcomplete', 'cement', 'colorama', 'cprint', 'psutil', 'termcolor']
)
| Add missing dependency to clowder_test | Add missing dependency to clowder_test
| Python | mit | JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder | ---
+++
@@ -30,5 +30,5 @@
'clowder-test=clowder_test.clowder_test_app:main',
]
},
- install_requires=['argcomplete', 'cement', 'cprint', 'psutil', 'termcolor']
+ install_requires=['argcomplete', 'cement', 'colorama', 'cprint', 'psutil', 'termcolor']
) |
653cc71521636e7324b057a30506b3196b47461b | coding202-parsing-json/activity-parse-1.py | coding202-parsing-json/activity-parse-1.py | from urllib.request import Request, urlopen
import json
req = Request('http://jsonplaceholder.typicode.com/users')
response = urlopen(req)
response_string = response.read().decode("utf-8")
json_object = json.loads(response_string)
print(json_object)
#print(json.dumps(json_object, sort_keys=True, indent=4))
#print(json_object[4])
#print(json.dumps(json_object[4], sort_keys=True, indent=4))
#print(json_object[4]["name"])
#print(json_object[4]["address"]["geo"])
#print(json_object[4]["address"]["geo"]["lat"])
#print(json_object[4]["address"]["geo"]["lng"])
response.close() | from urllib.request import Request, urlopen
import json
req = Request('http://jsonplaceholder.typicode.com/users', headers={'User-Agent': 'Mozilla/5.0'})
response = urlopen(req)
response_string = response.read().decode("utf-8")
json_object = json.loads(response_string)
print(json_object)
#print(json.dumps(json_object, sort_keys=True, indent=4))
#print(json_object[4])
#print(json.dumps(json_object[4], sort_keys=True, indent=4))
#print(json_object[4]["name"])
#print(json_object[4]["address"]["geo"])
#print(json_object[4]["address"]["geo"]["lat"])
#print(json_object[4]["address"]["geo"]["lng"])
response.close()
| Fix for Spider Bot Defense | Fix for Spider Bot Defense
Updated user-agent to defeat a known defense against Python urllib-based http Spider Bot requests. | Python | apache-2.0 | CiscoDevNet/coding-skills-sample-code,CiscoDevNet/coding-skills-sample-code,CiscoDevNet/coding-skills-sample-code,CiscoDevNet/coding-skills-sample-code | ---
+++
@@ -1,7 +1,7 @@
from urllib.request import Request, urlopen
import json
-req = Request('http://jsonplaceholder.typicode.com/users')
+req = Request('http://jsonplaceholder.typicode.com/users', headers={'User-Agent': 'Mozilla/5.0'})
response = urlopen(req)
response_string = response.read().decode("utf-8")
|
198f070f15a234aee75a89a8fbc6ed1c712a9d6b | probfit/py23_compat.py | probfit/py23_compat.py | """
Python 2 / 3 compatibility helpers.
"""
import sys
py_ver = sys.version_info
PY2 = False
PY3 = False
if py_ver[0] == 2:
PY2 = True
else: # just in case PY4
PY3 = True
if PY2:
range = xrange
else:
range = range
| """
Python 2 / 3 compatibility helpers.
"""
import sys
py_ver = sys.version_info
PY2 = False
PY3 = False
if py_ver[0] == 2:
PY2 = True
else: # just in case PY4
PY3 = True
if PY2:
range = xrange # pylint: disable=undefined-variable
else:
range = range
| Disable pylint warning for xrange. | Disable pylint warning for xrange.
Pylint complains that xrange is an undefined variable when its run under
Python 3. This is true, which is why the offending line is wrapped with
an 'if PY3' clause.
| Python | mit | iminuit/probfit,iminuit/probfit | ---
+++
@@ -13,6 +13,6 @@
if PY2:
- range = xrange
+ range = xrange # pylint: disable=undefined-variable
else:
range = range |
9fe589bce3ace182803aff4b42ddf50cfcce4eb2 | app/application.py | app/application.py | import os
import uuid
from infosystem.common import authorization
from infosystem import database
from flask import Flask
from infosystem import system as system_module
from orcomm import init_data
app = Flask(__name__)
app.config['BASEDIR'] = os.path.abspath(os.path.dirname(__file__))
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + app.config['BASEDIR'] + '/infosystem.db'
system = system_module.System([
# TODO List here your modules
]
)
database.db.init_app(app)
with app.app_context():
database.db.create_all()
rows = system.subsystems['domain'].manager.count()
if (rows == 0):
init_data.do(system)
for subsystem in system.subsystems.values():
app.register_blueprint(subsystem)
def protect():
return authorization.protect(system)
app.before_request(protect)
def load_app():
return app
| import os
import uuid
from infosystem.common import authorization
from infosystem import database
from flask import Flask
from infosystem import system as system_module
from app import init_data
app = Flask(__name__)
app.config['BASEDIR'] = os.path.abspath(os.path.dirname(__file__))
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + app.config['BASEDIR'] + '/infosystem.db'
system = system_module.System([
# TODO List here your modules
]
)
database.db.init_app(app)
with app.app_context():
database.db.create_all()
rows = system.subsystems['domain'].manager.count()
if (rows == 0):
init_data.do(system)
for subsystem in system.subsystems.values():
app.register_blueprint(subsystem)
def protect():
return authorization.protect(system)
app.before_request(protect)
def load_app():
return app
| Rename orcomm pkg to app | Rename orcomm pkg to app
| Python | mit | fdoliveira/infosystem-seed | ---
+++
@@ -6,7 +6,7 @@
from flask import Flask
from infosystem import system as system_module
-from orcomm import init_data
+from app import init_data
app = Flask(__name__) |
219589c8498e2d0bd156bd9863d4ef23f4963111 | code/dataplot.py | code/dataplot.py | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def loadCudaStream(name):
"""
reads the file specified by name into a numpy array (and removes
the superfluous fourth bit from cuda's float4)
np.shape(data)=(N,3) where N is the length of a streamline
"""
data=np.fromfile(name, dtype="float32")
data=data.reshape(int(len(data)/4), 4)
data=np.delete(data,3,1)
return data
#
data=np.fromfile("../datadir/windings1.bin", dtype="float32")
datasize = np.sqrt(data.shape[0])
data=data.reshape(datasize, datasize)
data = np.minimum(data,1*np.ones(data.shape))
data = np.maximum(data,-1*np.ones(data.shape))
img = plt.imshow(data)
#img.set_cmap('hot')
plt.colorbar()
plt.show()
| import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import sys
def loadCudaStream(name):
"""
reads the file specified by name into a numpy array (and removes
the superfluous fourth bit from cuda's float4)
np.shape(data)=(N,3) where N is the length of a streamline
"""
data=np.fromfile(name, dtype="float32")
data=data.reshape(int(len(data)/4), 4)
data=np.delete(data,3,1)
return data
#
binfile = sys.argv[1]
data=np.fromfile(binfile, dtype="float32")
datasize = np.sqrt(data.shape[0])
data=data.reshape(datasize, datasize)
data = np.minimum(data,1*np.ones(data.shape))
data = np.maximum(data,-1*np.ones(data.shape))
img = plt.imshow(data)
#img.set_cmap('hot')
plt.colorbar()
plt.show()
| Read binary data file location from commandline | Read binary data file location from commandline
| Python | mit | TAdeJong/plasma-analysis,TAdeJong/plasma-analysis | ---
+++
@@ -1,6 +1,7 @@
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
+import sys
def loadCudaStream(name):
"""
@@ -14,7 +15,8 @@
data=np.delete(data,3,1)
return data
#
-data=np.fromfile("../datadir/windings1.bin", dtype="float32")
+binfile = sys.argv[1]
+data=np.fromfile(binfile, dtype="float32")
datasize = np.sqrt(data.shape[0])
data=data.reshape(datasize, datasize)
data = np.minimum(data,1*np.ones(data.shape)) |
c1dbb7ef6b9176b4ec09da5446a082fd88abc37a | numba_wrapper.py | numba_wrapper.py | # Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
## Allow the code to function without numba, but discourage it
try:
from numba import njit, jit, vectorize, int64, float64, complex128
from numba.utils import IS_PY3
GOT_NUMBA = True
except ImportError:
import warnings
import sys
warning_text = \
"\n\n" + "!" * 53 + "\n" + \
"Could not import from numba, which means that some\n" + \
"parts of this code may run MUCH more slowly. You\n" + \
"may wish to install numba." + \
"\n" + "!" * 53 + "\n"
warnings.warn(warning_text)
def _identity_decorator_outer(*args, **kwargs):
def _identity_decorator_inner(fn):
return fn
return _identity_decorator_inner
njit = _identity_decorator_outer
jit = _identity_decorator_outer
vectorize = _identity_decorator_outer
int64 = int
float64 = float
complex128 = complex
IS_PY3 = (sys.version_info[:2] >= (3, 0))
GOT_NUMBA = False
if IS_PY3:
xrange = range
else:
xrange = xrange
| # Copyright (c) 2017, Michael Boyle
# See LICENSE file for details: <https://github.com/moble/quaternion/blob/master/LICENSE>
from __future__ import division, print_function, absolute_import
## Allow the code to function without numba, but discourage it
try:
from numba import njit, jit, vectorize, int64, float64, complex128
try:
from numba.utils import IS_PY3
except ModuleNotFoundError:
IS_PY3 = (sys.version_info[:2] >= (3, 0))
GOT_NUMBA = True
except ImportError:
import warnings
import sys
warning_text = \
"\n\n" + "!" * 53 + "\n" + \
"Could not import from numba, which means that some\n" + \
"parts of this code may run MUCH more slowly. You\n" + \
"may wish to install numba." + \
"\n" + "!" * 53 + "\n"
warnings.warn(warning_text)
def _identity_decorator_outer(*args, **kwargs):
def _identity_decorator_inner(fn):
return fn
return _identity_decorator_inner
njit = _identity_decorator_outer
jit = _identity_decorator_outer
vectorize = _identity_decorator_outer
int64 = int
float64 = float
complex128 = complex
IS_PY3 = (sys.version_info[:2] >= (3, 0))
GOT_NUMBA = False
if IS_PY3:
xrange = range
else:
xrange = xrange
| Handle removal of IS_PY3 from numba | Handle removal of IS_PY3 from numba
Closes #133 | Python | mit | moble/quaternion,moble/quaternion | ---
+++
@@ -6,7 +6,10 @@
## Allow the code to function without numba, but discourage it
try:
from numba import njit, jit, vectorize, int64, float64, complex128
- from numba.utils import IS_PY3
+ try:
+ from numba.utils import IS_PY3
+ except ModuleNotFoundError:
+ IS_PY3 = (sys.version_info[:2] >= (3, 0))
GOT_NUMBA = True
except ImportError:
import warnings |
6f8b370222b6f842b3ea89bb854b4883d145dfe0 | nuts/__init__.py | nuts/__init__.py | # The main message class that the AuthChannel operate on
class AuthenticatedMessage(object):
def __init__(self, sender, msg, session=None):
self.sender = sender
self.msg = msg
self.session = session
def __str__(self):
return self.msg
class NutsError(Exception):
""" General NUTS-related failure. """
class NutsConnectionError(NutsError):
""" Something failed in the communication. """
class NutsMessageTooLarge(NutsError):
""" Tried to send message larger than what's supported by the underlying
transport.
"""
class NutsInvalidState(NutsError):
""" Tried to perform an action which is unavilable in the current
state. """
from .channels import (
AuthChannel,
UDPAuthChannel,
)
from .enums import (
ClientState,
ServerState,
Message,
)
| # The main message class that the AuthChannel operate on
class AuthenticatedMessage(object):
def __init__(self, sender, msg, session=None):
self.sender = sender
self.msg = msg
self.session = session
def __str__(self):
return self.msg
def __int__(self):
return int(self.msg)
def __float__(self):
return float(self.msg)
class NutsError(Exception):
""" General NUTS-related failure. """
class NutsConnectionError(NutsError):
""" Something failed in the communication. """
class NutsMessageTooLarge(NutsError):
""" Tried to send message larger than what's supported by the underlying
transport.
"""
class NutsInvalidState(NutsError):
""" Tried to perform an action which is unavilable in the current
state. """
from .channels import (
AuthChannel,
UDPAuthChannel,
)
from .enums import (
ClientState,
ServerState,
Message,
)
| Allow int(msg) and float(msg) calls | Allow int(msg) and float(msg) calls
| Python | mit | thusoy/nuts-auth,thusoy/nuts-auth | ---
+++
@@ -8,6 +8,13 @@
def __str__(self):
return self.msg
+
+ def __int__(self):
+ return int(self.msg)
+
+ def __float__(self):
+ return float(self.msg)
+
class NutsError(Exception):
""" General NUTS-related failure. """ |
26460d817573bb860a542f26b96bc02b0e2082d8 | fetch_configs/pdfium.py | fetch_configs/pdfium.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class PdfiumConfig(config_util.Config):
"""Basic Config class for pdfium."""
@staticmethod
def fetch_spec(props):
url = 'https://pdfium.googlesource.com/pdfium.git',
solution = {
'name': 'src',
'url': url,
'managed': False,
'custom_vars': {},
}
if props.get('checkout_configuration'):
solution['custom_vars']['checkout_configuration'] = props[
'checkout_configuration']
spec = {
'solutions': [solution],
}
if props.get('target_os'):
spec['target_os'] = props['target_os'].split(',')
if props.get('target_os_only'):
spec['target_os_only'] = props['target_os_only']
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'pdfium'
def main(argv=None):
return PdfiumConfig().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class PdfiumConfig(config_util.Config):
"""Basic Config class for pdfium."""
@staticmethod
def fetch_spec(props):
url = 'https://pdfium.googlesource.com/pdfium.git'
solution = {
'name': 'pdfium',
'url': url,
'managed': False,
'custom_vars': {},
}
if props.get('checkout_configuration'):
solution['custom_vars']['checkout_configuration'] = props[
'checkout_configuration']
spec = {
'solutions': [solution],
}
if props.get('target_os'):
spec['target_os'] = props['target_os'].split(',')
if props.get('target_os_only'):
spec['target_os_only'] = props['target_os_only']
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'pdfium'
def main(argv=None):
return PdfiumConfig().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| Fix typos in PDFium fetch config. | Fix typos in PDFium fetch config.
- The extra comma at the end of the url string unintentionally turns
the url into a list.
- The project name should be "pdfium".
Change-Id: I8944d59d06751716c512030145d29aac10cf13fd
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3770290
Reviewed-by: Aravind Vasudevan <dbe94b625b74f03bb5e24a737a4f1e15753433de@google.com>
Commit-Queue: Lei Zhang <79648d9aca10df37c1b962af0b65ebfac3e19883@chromium.org>
| Python | bsd-3-clause | CoherentLabs/depot_tools,CoherentLabs/depot_tools | ---
+++
@@ -14,9 +14,9 @@
@staticmethod
def fetch_spec(props):
- url = 'https://pdfium.googlesource.com/pdfium.git',
+ url = 'https://pdfium.googlesource.com/pdfium.git'
solution = {
- 'name': 'src',
+ 'name': 'pdfium',
'url': url,
'managed': False,
'custom_vars': {}, |
e5511585341ef9de72dfa2a43db207af42f786ac | flask_static/watcher.py | flask_static/watcher.py | import os
from threading import Thread
from werkzeug._reloader import ReloaderLoop
class Watcher(Thread, ReloaderLoop):
def __init__(self, paths, static, tasks, interval=1, *args, **kwargs):
self.paths = paths
self.static = static
self.tasks = tasks
self.debug = kwargs.get('debug')
super(Watcher, self).__init__(*args, **kwargs)
ReloaderLoop.__init__(self, interval=interval)
def run(self):
times = {}
while not self._Thread__stopped:
for filename in self.static.findFiles(self.path):
try:
currtime = os.stat(filename).st_mtime
except OSError:
continue
oldtime = times.get(filename)
if oldtime and currtime > oldtime:
if self.debug:
print('[*] detected changes on %s' % filename)
self.static.run(*self.tasks)
times[filename] = currtime
break
times[filename] = currtime
self._sleep(self.interval)
| import os
from threading import Thread
from werkzeug._reloader import ReloaderLoop
class Watcher(Thread, ReloaderLoop):
def __init__(self, paths, static, tasks, interval=1, *args, **kwargs):
self.paths = paths
self.static = static
self.tasks = tasks
self.debug = kwargs.get('debug')
del kwargs['debug']
super(Watcher, self).__init__(*args, **kwargs)
ReloaderLoop.__init__(self, interval=interval)
def run(self):
times = {}
while not self._Thread__stopped:
for filename in self.static.findFiles(self.path):
try:
currtime = os.stat(filename).st_mtime
except OSError:
continue
oldtime = times.get(filename)
if oldtime and currtime > oldtime:
if self.debug:
print('[*] detected changes on %s' % filename)
self.static.run(*self.tasks)
times[filename] = currtime
break
times[filename] = currtime
self._sleep(self.interval)
| Remove invalid keyword from parameters dictionary | Remove invalid keyword from parameters dictionary
| Python | mit | rolurq/flask-gulp | ---
+++
@@ -11,6 +11,7 @@
self.static = static
self.tasks = tasks
self.debug = kwargs.get('debug')
+ del kwargs['debug']
super(Watcher, self).__init__(*args, **kwargs)
ReloaderLoop.__init__(self, interval=interval)
|
687692cd85db180ae962ec63c4bd4fbc73e33f57 | flask_storage/_utils.py | flask_storage/_utils.py | class ConfigItem(object):
"""The configuration item which may be bound with a instance.
:param name: the property name.
:param namespace: optional. the name of the attribute which contains all
configuration nested in the instance.
:param default: optional. the value which be provided while the
configuration item has been missed.
:param required: optional. if this paramater be ``True`` , getting missed
configuration item without default value will trigger a
``RuntimeError`` .
"""
def __init__(self, name, namespace="config", default=None, required=False):
self.name = name
self.namespace = namespace
self.default = default
self.is_required = required
def __repr__(self):
template = "ConfigItem(%r, namespace=%r, default=%r, required=%r)"
return template % (self.name, self.name, self.default, self.required)
def __get__(self, instance, owner):
if instance is None:
return self
namespace = self._namespace(instance)
if self.name not in namespace and self.required:
raise RuntimeError("missing %s['%s'] in %r" %
(self.namespace, self.name, instance))
return namespace.get(self.name, self.default)
def __set__(self, instance, value):
namespace = self._namespace(instance)
namespace[self.name] = value
def _namespace(self, instance):
"""Gets exists namespace or creates it."""
if not hasattr(instance, self.namespace):
setattr(instance, self.namespace, {})
return getattr(instance, self.namespace)
| class ConfigItem(object):
"""The configuration item which may be bound with a instance.
:param name: the property name.
:param namespace: optional. the name of the attribute which contains all
configuration nested in the instance.
:param default: optional. the value which be provided while the
configuration item has been missed.
:param required: optional. if this paramater be ``True`` , getting missed
configuration item without default value will trigger a
``RuntimeError`` .
"""
def __init__(self, name, namespace="config", default=None, required=False):
self.name = name
self.namespace = namespace
self.default = default
self.required = required
def __repr__(self):
template = "ConfigItem(%r, namespace=%r, default=%r, required=%r)"
return template % (self.name, self.name, self.default, self.required)
def __get__(self, instance, owner):
if instance is None:
return self
namespace = self._namespace(instance)
if self.name not in namespace and self.required:
raise RuntimeError("missing %s['%s'] in %r" %
(self.namespace, self.name, instance))
return namespace.get(self.name, self.default)
def __set__(self, instance, value):
namespace = self._namespace(instance)
namespace[self.name] = value
def _namespace(self, instance):
"""Gets exists namespace or creates it."""
if not hasattr(instance, self.namespace):
setattr(instance, self.namespace, {})
return getattr(instance, self.namespace)
| Fix up a typo in utilities | Fix up a typo in utilities
| Python | bsd-3-clause | lepture/flask-storage,menghan/flask-storage,fengluo/flask-storage,LiuDeng/flask-storage | ---
+++
@@ -15,7 +15,7 @@
self.name = name
self.namespace = namespace
self.default = default
- self.is_required = required
+ self.required = required
def __repr__(self):
template = "ConfigItem(%r, namespace=%r, default=%r, required=%r)" |
63a1a37f597d6b0313136caf011d2d4f50c17329 | src/sentry/api/serializers/models/user.py | src/sentry/api/serializers/models/user.py | from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import User
from sentry.utils.avatar import get_gravatar_url
@register(User)
class UserSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'name': obj.get_full_name(),
'email': obj.email,
'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d
| from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import User
from sentry.utils.avatar import get_gravatar_url
@register(User)
class UserSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'name': obj.get_display_name(),
'email': obj.email,
'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d
| Use display name instead of full name | Use display name instead of full name
| Python | bsd-3-clause | zenefits/sentry,jean/sentry,mvaled/sentry,imankulov/sentry,mitsuhiko/sentry,JackDanger/sentry,beeftornado/sentry,wong2/sentry,BuildingLink/sentry,JamesMura/sentry,ngonzalvez/sentry,JamesMura/sentry,zenefits/sentry,Kryz/sentry,mvaled/sentry,korealerts1/sentry,gencer/sentry,Kryz/sentry,looker/sentry,jean/sentry,mvaled/sentry,korealerts1/sentry,gencer/sentry,JamesMura/sentry,fotinakis/sentry,imankulov/sentry,jean/sentry,daevaorn/sentry,BayanGroup/sentry,BuildingLink/sentry,felixbuenemann/sentry,beeftornado/sentry,mvaled/sentry,wong2/sentry,alexm92/sentry,gencer/sentry,hongliang5623/sentry,gencer/sentry,ngonzalvez/sentry,kevinlondon/sentry,felixbuenemann/sentry,looker/sentry,daevaorn/sentry,zenefits/sentry,looker/sentry,ifduyue/sentry,alexm92/sentry,mvaled/sentry,fotinakis/sentry,Natim/sentry,looker/sentry,hongliang5623/sentry,songyi199111/sentry,songyi199111/sentry,Natim/sentry,gencer/sentry,looker/sentry,Kryz/sentry,mitsuhiko/sentry,zenefits/sentry,daevaorn/sentry,fuziontech/sentry,JackDanger/sentry,korealerts1/sentry,ifduyue/sentry,songyi199111/sentry,BayanGroup/sentry,fuziontech/sentry,JamesMura/sentry,zenefits/sentry,nicholasserra/sentry,fotinakis/sentry,imankulov/sentry,alexm92/sentry,BuildingLink/sentry,ifduyue/sentry,BuildingLink/sentry,hongliang5623/sentry,Natim/sentry,nicholasserra/sentry,nicholasserra/sentry,beeftornado/sentry,fuziontech/sentry,jean/sentry,kevinlondon/sentry,felixbuenemann/sentry,jean/sentry,kevinlondon/sentry,ifduyue/sentry,JamesMura/sentry,ifduyue/sentry,fotinakis/sentry,daevaorn/sentry,BayanGroup/sentry,ngonzalvez/sentry,mvaled/sentry,wong2/sentry,JackDanger/sentry,BuildingLink/sentry | ---
+++
@@ -10,7 +10,7 @@
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
- 'name': obj.get_full_name(),
+ 'name': obj.get_display_name(),
'email': obj.email,
'avatarUrl': get_gravatar_url(obj.email, size=32),
} |
eeb12c76698eedecbeb54585deffca605bd7096b | python/setup.py | python/setup.py | #! /usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
from sys import version_info
if version_info[:2] > (2, 5):
install_requires = []
else:
install_requires = ['simplejson >= 2.0.0']
setup(
name = 'cm_api',
version = '1.0', # Compatible with API v1
packages = find_packages('src'),
package_dir = {'cm_api': 'src/cm_api'},
# Project uses simplejson, so ensure that it gets installed or upgraded
# on the target machine
install_requires = install_requires,
author = 'Cloudera, Inc.',
description = 'Cloudera Manager API client',
license = 'Proprietary',
url = 'http://www.cloudera.com/',
)
| #! /usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
from sys import version_info
if version_info[:2] > (2, 5):
install_requires = []
else:
install_requires = ['simplejson >= 2.0.0']
setup(
name = 'cm_api',
version = '1.0.0', # Compatible with API v1
packages = find_packages('src'),
package_dir = {'cm_api': 'src/cm_api'},
# Project uses simplejson, so ensure that it gets installed or upgraded
# on the target machine
install_requires = install_requires,
author = 'Cloudera, Inc.',
description = 'Cloudera Manager API client',
license = 'Apache License 2.0',
url = 'https://github.com/cloudera/cm_api',
)
| Package metadata should have ASL2 as license | Package metadata should have ASL2 as license
| Python | apache-2.0 | cloudera/cm_api,cdapio/cm_api,cdapio/cm_api,justinhayes/cm_api,cdapio/cm_api,justinhayes/cm_api,cloudera/cm_api,cloudera/cm_api,justinhayes/cm_api,cdapio/cm_api | ---
+++
@@ -25,7 +25,7 @@
setup(
name = 'cm_api',
- version = '1.0', # Compatible with API v1
+ version = '1.0.0', # Compatible with API v1
packages = find_packages('src'),
package_dir = {'cm_api': 'src/cm_api'},
@@ -35,6 +35,6 @@
author = 'Cloudera, Inc.',
description = 'Cloudera Manager API client',
- license = 'Proprietary',
- url = 'http://www.cloudera.com/',
+ license = 'Apache License 2.0',
+ url = 'https://github.com/cloudera/cm_api',
) |
cb161eb1cc66c520a81026798112fb13d0cd0e50 | community/__init__.py | community/__init__.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This package implements community detection.
Package name is community but refer to python-louvain on pypi
"""
from .community_louvain import (
partition_at_level,
modularity,
best_partition,
generate_dendrogram,
induced_graph,
load_binary,
)
__author__ = """Thomas Aynaud (thomas.aynaud@lip6.fr)"""
# Copyright (C) 2009 by
# Thomas Aynaud <thomas.aynaud@lip6.fr>
# All rights reserved.
# BSD license.
| #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This package implements community detection.
Package name is community but refer to python-louvain on pypi
"""
from .community_louvain import (
partition_at_level,
modularity,
best_partition,
generate_dendrogram,
induced_graph,
load_binary,
)
__version__ = "0.10"
__author__ = """Thomas Aynaud (thomas.aynaud@lip6.fr)"""
# Copyright (C) 2009 by
# Thomas Aynaud <thomas.aynaud@lip6.fr>
# All rights reserved.
# BSD license.
| Add version information to package | Add version information to package
| Python | bsd-3-clause | taynaud/python-louvain | ---
+++
@@ -15,7 +15,7 @@
load_binary,
)
-
+__version__ = "0.10"
__author__ = """Thomas Aynaud (thomas.aynaud@lip6.fr)"""
# Copyright (C) 2009 by
# Thomas Aynaud <thomas.aynaud@lip6.fr> |
7d54decec5f005ee7bcd8b16d75225a60f7cbdfe | post-to-slack.py | post-to-slack.py | """
Author: Ashish Gaikwad <ash.gkwd@gmail.com>
Copyright (c) 2015 Ashish Gaikwad
Description: This script will post a message on your slack channel
Please configure your `/etc/brainy-beats.cfg` file first.
Example brainy-beats.cfg
[slack]
url=https://hooks.slack.com/services/XxXxXxXxXx
channel=botwa
icon_emoji=ghost
username=beats-bot
"""
from subprocess import call
import sys
import json
import ConfigParser
def payload(text, conf):
toPost = {
"channel": "#" + conf.get("slack", "channel"),
"username": conf.get("slack", "username"),
"text": text,
"icon_emoji": ":" + conf.get("slack", "icon_emoji") + ":"
}
return "payload=" + json.dumps(toPost)
def main():
CONFIG_FILE = '/etc/slack_post.cfg'
# read configuration
conf = ConfigParser.ConfigParser()
conf.read(CONFIG_FILE)
call([
"curl",
"-X",
"POST",
"--data-urlencode",
payload(sys.argv[1], conf),
conf.get("slack", "url")
])
if __name__ == '__main__':
main() | """
Author: Ashish Gaikwad <ash.gkwd@gmail.com>
Copyright (c) 2015 Ashish Gaikwad
Description: This script will post a message on your slack channel
Please configure your `/etc/brainy-beats.cfg` file first.
Example brainy-beats.cfg
[slack]
url=https://hooks.slack.com/services/XxXxXxXxXx
channel=botwa
icon_emoji=ghost
username=beats-bot
"""
from subprocess import call
import sys
import json
import ConfigParser
def payload(text, conf):
toPost = {
"channel": "#" + conf.get("slack", "channel"),
"username": conf.get("slack", "username"),
"text": text,
"icon_emoji": ":" + conf.get("slack", "icon_emoji") + ":"
}
return "payload=" + json.dumps(toPost)
def main():
CONFIG_FILE = '/etc/brainy-beats.cfg'
# read configuration
conf = ConfigParser.ConfigParser()
conf.read(CONFIG_FILE)
call([
"curl",
"-X",
"POST",
"--data-urlencode",
payload(sys.argv[1], conf),
conf.get("slack", "url")
])
if __name__ == '__main__':
main() | FIX - path of .cfg file corrected | FIX - path of .cfg file corrected
| Python | mit | ashgkwd/brainy-beats,ashgkwd/brainy-beats | ---
+++
@@ -29,7 +29,7 @@
return "payload=" + json.dumps(toPost)
def main():
- CONFIG_FILE = '/etc/slack_post.cfg'
+ CONFIG_FILE = '/etc/brainy-beats.cfg'
# read configuration
conf = ConfigParser.ConfigParser()
conf.read(CONFIG_FILE) |
b03dff0d6964d886f122936d097c3d4acc0582db | proper_parens.py | proper_parens.py | #!/usr/bin/env python
from __future__ import print_function
from __future__ import unicode_literals
def safe_input(prompt):
    """Return user input after catching KeyboardInterrupt and EOFError.

    Exits the interpreter (via quit()) when the user sends EOF or Ctrl-C.
    Python 2 only: uses raw_input and decodes the byte string to unicode.
    """
    try:
        reply = raw_input(prompt)
    except (EOFError, KeyboardInterrupt):
        quit()
    else:
        return reply.decode('utf-8')  # Convert input to unicode
prompt = "Input a Lisp style statement '(test)': "
reply = safe_input(prompt)
def check_statement(value):
    """Return -1 when the first ")" precedes any "(" (broken statement).

    Falls through (returns None) when no such defect is detected.
    """
    first_open = value.find("(")
    first_close = value.find(")")
    broken = (first_open == -1 and first_close != -1) or first_open > first_close
    if broken:
        return -1
| #!/usr/bin/env python
from __future__ import print_function
from __future__ import unicode_literals
def safe_input(prompt):
    """Prompt the user and return the reply decoded as unicode.

    EOF (Ctrl-D) or KeyboardInterrupt (Ctrl-C) terminates the interpreter
    via quit() instead of propagating the exception.
    """
    try:
        raw = raw_input(prompt)
    except (EOFError, KeyboardInterrupt):
        quit()
    return raw.decode('utf-8')  # py2 str -> unicode
prompt = "Input a Lisp style statement '(test)': "
reply = safe_input(prompt)
def check_statement(value):
    """Check that parentheses in a Lisp-style statement are properly paired.

    value: the statement string to check.
    Returns -1 when the statement is broken (a closing paren appears before
    its paired opening paren), otherwise 0.
    """
    # Bug fix: the original scanned the module-level `reply` instead of the
    # `value` argument, and called .find() on a list (AttributeError).
    open_index = [i for i, char in enumerate(value) if char == "("]
    close_index = [i for i, char in enumerate(value) if char == ")"]

    # Each paired "(" must occur before its corresponding ")".
    pairs_ordered = [a < b for a, b in zip(open_index, close_index)]

    if not all(pairs_ordered):
        return -1
    else:
        return 0
| Add function for broken and groundwork for other objectives | Add function for broken and groundwork for other objectives
| Python | mit | constanthatz/data-structures | ---
+++
@@ -17,9 +17,12 @@
def check_statement(value):
- where_open = value.find("(")
- where_close = value.find(")")
+ open_index = [i for i, val in enumerate(reply) if val == "("]
+ close_index = [i for i, val in enumerate(reply) if val == ")"]
- if ((where_open == -1) and where_close != -1) or (where_open > where_close):
+ paren_total_broken = [a < b for a, b in zip(open_index, close_index)]
+
+ if paren_total_broken.find(False):
return -1
-
+ else:
+ return 0 |
8cb685ac92cd2fcc628ad96feaed67fc1af48ed6 | falafel/mappers/dmesg.py | falafel/mappers/dmesg.py | from .. import LogFileOutput, mapper
class DmesgLineList(LogFileOutput):
    # Wraps captured `dmesg` output; line storage (self.lines) comes from
    # the LogFileOutput base class.

    def has_startswith(self, prefix):
        """
        Return True if at least one dmesg line starts with `prefix`,
        otherwise False.
        """
        return any(line.startswith(prefix) for line in self.lines)
@mapper('dmesg')
def dmesg(context):
    """
    Mapper entry point for `dmesg` output.

    context: parser context forwarded to DmesgLineList (see LogFileOutput).
    Returns an object of DmesgLineList.
    """
    return DmesgLineList(context)
@mapper('vmcore-dmesg')
def vmcore_dmesg(context):
    """
    Mapper entry point for `vmcore-dmesg` output.

    context: parser context forwarded to DmesgLineList (see LogFileOutput).
    Returns an object of DmesgLineList.
    """
    return DmesgLineList(context)
| from .. import LogFileOutput, mapper
@mapper('dmesg')
class DmesgLineList(LogFileOutput):
    """Parsed `dmesg` output; line storage (self.lines) is provided by the
    LogFileOutput base class."""

    def has_startswith(self, prefix):
        """Return True if at least one dmesg line begins with `prefix`."""
        for line in self.lines:
            if line.startswith(prefix):
                return True
        return False
@mapper('dmesg')
def dmesg(context):
    """
    Mapper entry point for `dmesg` output.
    Returns an object of DmesgLineList built from the given context.
    """
    return DmesgLineList(context)
@mapper('vmcore-dmesg')
def vmcore_dmesg(context):
    """
    Mapper entry point for `vmcore-dmesg` output.
    Returns an object of DmesgLineList built from the given context.
    """
    return DmesgLineList(context)
| Add mapper decorator to DmesgLineList mapper | Add mapper decorator to DmesgLineList mapper
* Add @mapper decorator to the DmesgLineList shared mapper to allow use
directly by plugins.
| Python | apache-2.0 | RedHatInsights/insights-core,RedHatInsights/insights-core | ---
+++
@@ -1,6 +1,7 @@
from .. import LogFileOutput, mapper
+@mapper('dmesg')
class DmesgLineList(LogFileOutput):
def has_startswith(self, prefix):
""" |
a070624ab59f700b86b008f7f81cd03b9622c5b3 | iati/tests/resources.py | iati/tests/resources.py | """A module to prove a way of locating and loading test resource files.
This is akin to the `iati.resources` module, but deals with test data.
"""
import iati.resources
def load_as_dataset(file_path):
    """Load a specified test data file as a Dataset.

    Args:
        file_path (str): The path of the file, relative to the root test data folder. Folders should be separated by a forward-slash (`/`).

    Returns:
        dataset: A Dataset containing the contents of the file at the specified location.

    Raises:
        iati.exceptions.ValidationError: If the provided XML does not conform to the IATI standard.
    """
    # Resolve the relative test path to an absolute one, then delegate to
    # the production loader.
    return iati.resources.load_as_dataset(iati.resources.get_test_data_path(file_path))
def load_as_string(file_path):
    """Load a specified test data file as a string.

    Args:
        file_path (str): The path of the file, relative to the root test data folder. Folders should be separated by a forward-slash (`/`).

    Returns:
        str (python3) / unicode (python2): The contents of the file at the specified location.
    """
    # Resolve the relative test path to an absolute one, then delegate to
    # the production loader.
    return iati.resources.load_as_string(iati.resources.get_test_data_path(file_path))
| """A module to prove a way of locating and loading test resource files.
This is akin to the `iati.resources` module, but deals with test data.
"""
import iati.resources
def load_as_dataset(file_path):
    """Load a specified test data file as a Dataset.

    Args:
        file_path (str): Path of the file relative to the root test data
            folder; folders separated by a forward-slash (`/`).

    Returns:
        dataset: A Dataset containing the contents of the specified file.

    Raises:
        iati.exceptions.ValidationError: If the provided XML does not
            conform to the IATI standard.
    """
    data_path = iati.resources.get_test_data_path(file_path)
    return iati.resources.load_as_dataset(data_path)
def load_as_string(file_path):
    """Load a specified test data file as a string.

    Args:
        file_path (str): Path of the file relative to the root test data
            folder; folders separated by a forward-slash (`/`).

    Returns:
        str (python3) / unicode (python2): The contents of the file.
    """
    data_path = iati.resources.get_test_data_path(file_path)
    return iati.resources.load_as_string(data_path)
| Add blank line to fix linting error | Add blank line to fix linting error
| Python | mit | IATI/iati.core,IATI/iati.core | ---
+++
@@ -4,6 +4,7 @@
"""
import iati.resources
+
def load_as_dataset(file_path):
"""Load a specified test data file as a Dataset. |
ff084f6fa7b61cef1550477265a65fa25b440b86 | mne/simulation/simulation_metrics.py | mne/simulation/simulation_metrics.py |
import numpy as np
def _check_stc(stc1, stc2):
    """Raise ValueError unless the two source estimates are comparable."""
    # XXX What should we check? that the data is having the same size?
    if stc1.data.shape != stc2.data.shape:
        raise ValueError('Data in stcs must have the same size')
    # NOTE(review): if `times` is an ndarray this comparison yields an
    # array, and truth-testing it raises ValueError -- confirm, and
    # consider np.array_equal instead.
    if stc1.times != stc2.times:
        raise ValueError('Times of two stcs must match.')
def source_estimate_quantification(stc1, stc2, metric='rms'):
    """Helper function to calculate matrix similarities.

    Parameters
    ----------
    stc1 : SourceEstimate
        First source estimate for comparison
    stc2 : SourceEstimate
        Second source estimate for comparison
    metric : str
        Metric to calculate. 'rms', 'avg_corrcoef',

    Returns
    -------
    """
    # TODO Add checks for source space
    _check_stc(stc1, stc2)
    # NOTE(review): `score` is assigned but never returned, so callers
    # always receive None.
    score = _calc_metric(stc1.data, stc2.data, metric)
def _calc_metric(data1, data2, metric):
"""Calculate metric of choice"""
# Calculate root mean square difference between two matrices
if metric == 'rms':
return np.mean((stc1.data - stc2.data) ** 2)
# Calculate correlation coefficient between matrix elements
elif metric == 'avg_corrcoef':
return np.mean(np.corrcoeff(stc1.data, stc2.data))
| # Authors: Yousra Bekhti
# Mark Wronkiewicz <wronk.mark@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
def _check_stc(stc1, stc2):
    """Raise ValueError unless the two source estimates are comparable."""
    # XXX What should we check? that the data is having the same size?
    if stc1.data.shape != stc2.data.shape:
        raise ValueError('Data in stcs must have the same size')
    # NOTE(review): if `times` is an ndarray this comparison yields an
    # array, and truth-testing it raises ValueError -- confirm, and
    # consider np.array_equal instead.
    if stc1.times != stc2.times:
        raise ValueError('Times of two stcs must match.')
def source_estimate_quantification(stc1, stc2, metric='rms'):
    """Helper function to calculate matrix similarities.

    Parameters
    ----------
    stc1 : SourceEstimate
        First source estimate for comparison
    stc2 : SourceEstimate
        Second source estimate for comparison
    metric : str
        Metric to calculate. 'rms', 'corr'

    Returns
    -------
    score : float
        The calculated similarity metric.
    """
    # TODO Add checks for source space
    _check_stc(stc1, stc2)
    # Bug fix: the score was computed but never returned, so callers always
    # received None.
    return _calc_metric(stc1.data, stc2.data, metric)
def _calc_metric(data1, data2, metric):
"""Helper to calculate metric of choice.
Parameters
----------
data1 : ndarray, shape(n_sources, ntimes)
Second data matrix
data2 : ndarray, shape(n_sources, ntimes)
Second data matrix
metric : str
Metric to calculate. 'rms', 'corr',
Returns
-------
score : float
Calculated metric
"""
# Calculate root mean square difference between two matrices
if metric == 'rms':
return np.mean((stc1.data - stc2.data) ** 2)
# Calculate correlation coefficient between matrix elements
elif metric == 'corr':
return np.correlate(stc1.data.flatten(), stc2.data.flatten())
| Add documentation, tests, fix corr | Add documentation, tests, fix corr
| Python | bsd-3-clause | Teekuningas/mne-python,kingjr/mne-python,teonlamont/mne-python,alexandrebarachant/mne-python,jniediek/mne-python,rkmaddox/mne-python,olafhauk/mne-python,leggitta/mne-python,kingjr/mne-python,Teekuningas/mne-python,jaeilepp/mne-python,Eric89GXL/mne-python,nicproulx/mne-python,adykstra/mne-python,pravsripad/mne-python,larsoner/mne-python,leggitta/mne-python,jmontoyam/mne-python,cjayb/mne-python,jniediek/mne-python,cmoutard/mne-python,nicproulx/mne-python,mne-tools/mne-python,yousrabk/mne-python,kambysese/mne-python,alexandrebarachant/mne-python,lorenzo-desantis/mne-python,jaeilepp/mne-python,olafhauk/mne-python,yousrabk/mne-python,larsoner/mne-python,ARudiuk/mne-python,wronk/mne-python,kambysese/mne-python,wmvanvliet/mne-python,bloyl/mne-python,wronk/mne-python,drammock/mne-python,wmvanvliet/mne-python,larsoner/mne-python,wmvanvliet/mne-python,ARudiuk/mne-python,mne-tools/mne-python,cmoutard/mne-python,cjayb/mne-python,drammock/mne-python,Teekuningas/mne-python,teonlamont/mne-python,Eric89GXL/mne-python,bloyl/mne-python,rkmaddox/mne-python,pravsripad/mne-python,pravsripad/mne-python,drammock/mne-python,lorenzo-desantis/mne-python,mne-tools/mne-python,jmontoyam/mne-python,adykstra/mne-python,kingjr/mne-python,olafhauk/mne-python | ---
+++
@@ -1,3 +1,7 @@
+# Authors: Yousra Bekhti
+# Mark Wronkiewicz <wronk.mark@gmail.com>
+#
+# License: BSD (3-clause)
import numpy as np
@@ -34,13 +38,27 @@
score = _calc_metric(stc1.data, stc2.data, metric)
def _calc_metric(data1, data2, metric):
- """Calculate metric of choice"""
+ """Helper to calculate metric of choice.
+
+ Parameters
+ ----------
+ data1 : ndarray, shape(n_sources, ntimes)
+ Second data matrix
+ data2 : ndarray, shape(n_sources, ntimes)
+ Second data matrix
+ metric : str
+ Metric to calculate. 'rms', 'corr',
+
+ Returns
+ -------
+ score : float
+ Calculated metric
+ """
# Calculate root mean square difference between two matrices
if metric == 'rms':
return np.mean((stc1.data - stc2.data) ** 2)
# Calculate correlation coefficient between matrix elements
- elif metric == 'avg_corrcoef':
- return np.mean(np.corrcoeff(stc1.data, stc2.data))
-
+ elif metric == 'corr':
+ return np.correlate(stc1.data.flatten(), stc2.data.flatten()) |
79b145f9c73deb9116b4d51e29952b63e4d63cc8 | install/setup_pi_box.py | install/setup_pi_box.py | import os
# Interactive installer step: ask where the Pi Box sync folder should live
# and create it if it does not exist yet.
print("Example Pi Box path: /home/username/my-pi-box")
pi_box_directory = raw_input("Pi Box path: ")
if not os.path.isdir(pi_box_directory):
    os.makedirs(pi_box_directory)
# Render the upstart job from its template with the chosen directory and
# install it system-wide (writing /etc/init requires root).
with open('./install/pi-box-conf-template.txt', 'r') as f:
    upstart_template = f.read()
with open('/etc/init/pi-box.conf', 'w+') as f:
    f.write(upstart_template.format(pi_box_directory))
| import os
import sys
# Abort early unless the Dropbox OAuth token file is already in place --
# the service cannot run without it.
if not os.path.exists('/opt/Pi-Box/dropbox.txt'):
    print('Dropbox token file (dropbox.txt) not found.')
    print('Go here to authorize Pi-Box and obtain the token file: blah, blah, blah')
    print('Save the file in: /opt/Pi-Box')
    print('Run the installation script again: ./install.sh')
    sys.exit()

# Ask where the Pi Box sync folder should live and create it if needed.
print("Example Pi Box path: /home/username/my-pi-box")
pi_box_directory = raw_input("Pi Box path: ")
if not os.path.isdir(pi_box_directory):
    os.makedirs(pi_box_directory)

# Render the upstart job from its template with the chosen directory and
# install it system-wide (writing /etc/init requires root).
with open('./install/pi-box-conf-template.txt', 'r') as f:
    upstart_template = f.read()
with open('/etc/init/pi-box.conf', 'w+') as f:
    f.write(upstart_template.format(pi_box_directory))
| Check for token file before installing | Check for token file before installing
| Python | mit | projectweekend/Pi-Box,projectweekend/Pi-Box | ---
+++
@@ -1,4 +1,12 @@
import os
+import sys
+
+if not os.path.exists('/opt/Pi-Box/dropbox.txt'):
+ print('Dropbox token file (dropbox.txt) not found.')
+ print('Go here to authorize Pi-Box and obtain the token file: blah, blah, blah')
+ print('Save the file in: /opt/Pi-Box')
+ print('Run the installation script again: ./install.sh')
+ sys.exit()
print("Example Pi Box path: /home/username/my-pi-box") |
0a5e272a1828be1eb93975e3b673ba67ca800635 | playserver/trackchecker.py | playserver/trackchecker.py | from threading import Timer
from . import track
_listeners = []
class TrackChecker():
    # Polls the `track` module for the currently playing song and fires the
    # registered listeners when a change is detected.

    def __init__(self, interval = 5):
        # NOTE(review): unused; listeners actually live in the module-level
        # _listeners list.
        self.listeners = []
        self.CHECK_INTERVAL = interval  # seconds between polls
        self._generator = self._checkSongGenerator()
        self.timer = None

    def _checkSongGenerator(self):
        # Generator driven by checkSong(): each next() performs one poll.
        while True:
            # NOTE(review): these are re-initialised to "" on every loop
            # iteration, so the comparison below is always against empty
            # strings -- listeners fire on every poll while anything is
            # playing. Likely why this design was later replaced.
            currentSong = ""
            currentArtist = ""
            currentAlbum = ""
            song = track.getCurrentSong()
            artist = track.getCurrentArtist()
            album = track.getCurrentAlbum()

            if (song != currentSong or artist != currentArtist
                    or album != currentAlbum):
                currentSong = song
                currentArtist = artist
                currentAlbum = album
                self._callListeners()
            yield

    def checkSong(self):
        # Advance the polling generator one step, then re-arm the timer if
        # periodic checking is active.
        next(self._generator)

        if self.timer != None:
            self.startTimer()

    def registerListener(self, function):
        # Listeners are shared module-wide via _listeners.
        _listeners.append(function)

    def _callListeners(self):
        for listener in _listeners:
            listener()

    def startTimer(self):
        # One-shot threading.Timer; checkSong() re-arms it, producing
        # periodic polling.
        self.timer = Timer(self.CHECK_INTERVAL, self.checkSong)
        self.timer.start()

    def cancelTimer(self):
        self.timer.cancel()
| from threading import Timer
from . import track
_listeners = []
class TrackChecker():
    """Polls the `track` module and fires the registered listeners whenever
    the current song, artist or album changes."""

    def __init__(self, interval = 5):
        self.listeners = []
        self.CHECK_INTERVAL = interval  # seconds between polls
        self.currentSong = ""
        self.currentArtist = ""
        self.currentAlbum = ""
        self.timer = None

    def checkSong(self):
        """Poll once; notify listeners on change and re-arm the timer."""
        song = track.getCurrentSong()
        artist = track.getCurrentArtist()
        album = track.getCurrentAlbum()

        changed = (song != self.currentSong
                   or artist != self.currentArtist
                   or album != self.currentAlbum)
        if changed:
            self.currentSong = song
            self.currentArtist = artist
            self.currentAlbum = album
            self._callListeners()

        if self.timer is not None:
            self.startTimer()

    def registerListener(self, function):
        """Register a zero-argument callable to run on track changes."""
        _listeners.append(function)

    def _callListeners(self):
        for callback in _listeners:
            callback()

    def startTimer(self):
        """Schedule the next poll after CHECK_INTERVAL seconds."""
        self.timer = Timer(self.CHECK_INTERVAL, self.checkSong)
        self.timer.start()

    def cancelTimer(self):
        """Stop periodic polling."""
        self.timer.cancel()
        self.timer = None
| Remove use of generator completely | Remove use of generator completely
| Python | mit | ollien/playserver,ollien/playserver,ollien/playserver | ---
+++
@@ -7,28 +7,23 @@
def __init__(self, interval = 5):
self.listeners = []
self.CHECK_INTERVAL = interval
- self._generator = self._checkSongGenerator()
+ self.currentSong = ""
+ self.currentArtist = ""
+ self.currentAlbum = ""
self.timer = None
- def _checkSongGenerator(self):
- while True:
- currentSong = ""
- currentArtist = ""
- currentAlbum = ""
- song = track.getCurrentSong()
- artist = track.getCurrentArtist()
- album = track.getCurrentAlbum()
+ def checkSong(self):
+ song = track.getCurrentSong()
+ artist = track.getCurrentArtist()
+ album = track.getCurrentAlbum()
- if (song != currentSong or artist != currentArtist
- or album != currentAlbum):
- currentSong = song
- currentArtist = artist
- currentAlbum = album
- self._callListeners()
- yield
-
- def checkSong(self):
- next(self._generator)
+ if (song != self.currentSong or artist != self.currentArtist
+ or album != self.currentAlbum):
+ self.currentSong = song
+ self.currentArtist = artist
+ self.currentAlbum = album
+ self._callListeners()
+
if self.timer != None:
self.startTimer()
@@ -45,3 +40,4 @@
def cancelTimer(self):
self.timer.cancel()
+ self.timer = None |
e4297691f20ec4185ed4491ab41553df14a05a91 | pycc/pycompat.py | pycc/pycompat.py | """Compatibility helpers for Py2 and Py3."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import sys
class VERSION(object):
    """Stand in for sys.version_info.

    The values from sys only have named parameters starting in PY27. This
    allows us to use named parameters for all versions of Python.
    """

    # Unpack the 5-tuple once at class-definition time.
    major, minor, micro, releaselevel, serial = sys.version_info
# Interpreter version flags, e.g. PY27 means "major 2, minor 7".
PY2 = VERSION.major == 2
PY25 = PY2 and VERSION.minor == 5
PY26 = PY2 and VERSION.minor == 6
PY27 = PY2 and VERSION.minor == 7

PY3 = not PY2
PY31 = PY3 and VERSION.minor == 1
PY32 = PY3 and VERSION.minor == 2
PY33 = PY3 and VERSION.minor == 3
# NOTE(review): lower-case name is inconsistent with PY33 etc. above.
py34 = PY3 and VERSION.minor == 4

# Provide a nice range function for py2.
try:
    range = xrange
except NameError:
    pass
| """Compatibility helpers for Py2 and Py3."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import sys
class VERSION(object):
    """Stand in for sys.version_info.

    The values from sys only have named parameters starting in PY27. This
    allows us to use named parameters for all versions of Python.
    """

    # Unpack the 5-tuple once at class-definition time.
    major, minor, micro, releaselevel, serial = sys.version_info
# Interpreter version flags, e.g. PY27 means "major 2, minor 7".
PY2 = VERSION.major == 2
PY25 = PY2 and VERSION.minor == 5
PY26 = PY2 and VERSION.minor == 6
PY27 = PY2 and VERSION.minor == 7

PY3 = not PY2
PY31 = PY3 and VERSION.minor == 1
PY32 = PY3 and VERSION.minor == 2
PY33 = PY3 and VERSION.minor == 3
# NOTE(review): lower-case name is inconsistent with PY33 etc. above.
py34 = PY3 and VERSION.minor == 4

# Provide a nice range function for py2.
try:
    range = xrange
except NameError:
    pass

# Provide a long type for py3.
try:
    long = long
except NameError:
    # PY3 unified int and long; alias long to int so py2 code keeps working.
    long = int
| Add a long type backfill for PY3 compat | Add a long type backfill for PY3 compat
PY3 combined the long and int types which makes some compiler
operations difficult. Adding a backfill to help with PY2/PY3
compat.
Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com>
| Python | apache-2.0 | kevinconway/pycc,kevinconway/pycc | ---
+++
@@ -32,3 +32,9 @@
range = xrange
except NameError:
pass
+
+# Provide a long type for py3.
+try:
+ long = long
+except NameError:
+ long = int |
287719c22d28d404c7944b2abecc8644bf348f49 | tests/test_subclasshook.py | tests/test_subclasshook.py | """
Test that if foreign code provides a class that conforms to
BidirectionalMapping's interface, it is automatically a subclass.
"""
from bidict import BidirectionalMapping
class DumbBidirectionalMapping(dict):
    """Minimal dict subclass that provides the BidirectionalMapping
    interface (__inverted__ and inv) without inheriting from it."""

    def __inverted__(self):
        """Return an iterator of (value, key) pairs."""
        return ((value, key) for key, value in self.items())

    @property
    def inv(self):
        """The inverse mapping, as another DumbBidirectionalMapping."""
        return DumbBidirectionalMapping(self.__inverted__())
def test_subclasshook():
    # DumbBidirectionalMapping never inherits from BidirectionalMapping, so
    # passing proves the ABC's subclass hook recognises the duck-typed
    # interface; a plain dict must still be rejected.
    assert issubclass(DumbBidirectionalMapping, BidirectionalMapping)
    assert not issubclass(dict, BidirectionalMapping)
| """
Test that if foreign code provides a class that conforms to
BidirectionalMapping's interface, it is automatically a subclass.
"""
from bidict import BidirectionalMapping
class DumbBidirectionalMapping(dict):
    """dict subclass duck-typing the BidirectionalMapping interface."""

    def __inverted__(self):
        """Yield each mapping as a (value, key) pair."""
        for item in self.items():
            yield (item[1], item[0])

    @property
    def inv(self):
        """The inverse mapping, built from the inverted pairs."""
        return DumbBidirectionalMapping(self.__inverted__())
class OldstyleClass():
    """
    Old-style class (not derived from object).

    This used to crash due to missing __mro__ attribute that is not present
    in oldstyle classes.
    """
    # NOTE(review): only Python 2 has old-style classes; under Python 3
    # this is an ordinary new-style class, so the regression is only
    # exercised when the suite runs on PY2.
def test_subclasshook():
    # The duck-typed class must be recognised by the ABC's subclass hook,
    # while a plain dict and an old-style class must be rejected (the
    # latter used to crash on the missing __mro__ attribute).
    assert issubclass(DumbBidirectionalMapping, BidirectionalMapping)
    assert not issubclass(dict, BidirectionalMapping)
    assert not issubclass(OldstyleClass, BidirectionalMapping)
| Add test for oldstyle class handling | Add test for oldstyle class handling
| Python | mpl-2.0 | jab/bidict,jab/bidict | ---
+++
@@ -15,6 +15,15 @@
return DumbBidirectionalMapping(self.__inverted__())
+class OldstyleClass():
+ """
+ Old-style class (not derived from object).
+ This used to crash due to missing __mro__ attribute that is not present
+ in oldstyle classes.
+ """
+
+
def test_subclasshook():
assert issubclass(DumbBidirectionalMapping, BidirectionalMapping)
assert not issubclass(dict, BidirectionalMapping)
+ assert not issubclass(OldstyleClass, BidirectionalMapping) |
135b7baaf8f4378488af2b1e0fb4abf63522f0ab | test/tests/python-imports/container.py | test/tests/python-imports/container.py | import zlib
import bz2

# Smoke test: round-trip a payload through each codec to verify the
# compression modules were built into this Python image.
assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
| import curses
import zlib
# Smoke test: zlib must round-trip a payload unchanged.
assert zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS'

import bz2
# Same round-trip check for bz2.
assert bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS'
| Reorganize python-imports to include "curses" | Reorganize python-imports to include "curses"
| Python | apache-2.0 | thresheek/official-images,davidl-zend/official-images,nodejs-docker-bot/official-images,infosiftr/stackbrew,infosiftr/stackbrew,pesho/docker-official-images,dinogun/official-images,mattrobenolt/official-images,thresheek/official-images,jperrin/official-images,dinogun/official-images,thresheek/official-images,neo-technology/docker-official-images,docker-flink/official-images,infosiftr/stackbrew,infosiftr/stackbrew,benbc/docker-official-images,robfrank/official-images,docker-solr/official-images,mattrobenolt/official-images,docker-solr/official-images,pesho/docker-official-images,jperrin/official-images,pesho/docker-official-images,neo-technology/docker-official-images,31z4/official-images,jperrin/official-images,docker-flink/official-images,davidl-zend/official-images,mattrobenolt/official-images,davidl-zend/official-images,nodejs-docker-bot/official-images,docker-flink/official-images,31z4/official-images,docker-library/official-images,dinogun/official-images,davidl-zend/official-images,chorrell/official-images,jperrin/official-images,infosiftr/stackbrew,31z4/official-images,chorrell/official-images,robfrank/official-images,davidl-zend/official-images,jperrin/official-images,dinogun/official-images,infosiftr/stackbrew,robfrank/official-images,robfrank/official-images,chorrell/official-images,nodejs-docker-bot/official-images,docker-flink/official-images,docker-flink/official-images,mattrobenolt/official-images,thresheek/official-images,docker-solr/official-images,docker-solr/official-images,neo-technology/docker-official-images,jperrin/official-images,thresheek/official-images,thresheek/official-images,mattrobenolt/official-images,neo-technology/docker-official-images,benbc/docker-official-images,chorrell/official-images,dinogun/official-images,chorrell/official-images,mattrobenolt/official-images,pesho/docker-official-images,31z4/official-images,infosiftr/stackbrew,benbc/docker-official-images,31z4/official-images,jperrin/official-images,din
ogun/official-images,robfrank/official-images,docker-flink/official-images,dinogun/official-images,docker-library/official-images,neo-technology/docker-official-images,docker-library/official-images,jperrin/official-images,neo-technology/docker-official-images,31z4/official-images,docker-library/official-images,robfrank/official-images,davidl-zend/official-images,docker-flink/official-images,31z4/official-images,davidl-zend/official-images,31z4/official-images,mattrobenolt/official-images,docker-solr/official-images,jperrin/official-images,nodejs-docker-bot/official-images,chorrell/official-images,davidl-zend/official-images,mattrobenolt/official-images,infosiftr/stackbrew,dinogun/official-images,infosiftr/stackbrew,chorrell/official-images,mattrobenolt/official-images,31z4/official-images,nodejs-docker-bot/official-images,infosiftr/stackbrew,dinogun/official-images,emilevauge/official-images,docker-flink/official-images,mattrobenolt/official-images,pesho/docker-official-images,docker-flink/official-images,nodejs-docker-bot/official-images,dinogun/official-images,docker-flink/official-images,thresheek/official-images,benbc/docker-official-images,benbc/docker-official-images,docker-solr/official-images,docker-library/official-images,chorrell/official-images,docker-flink/official-images,neo-technology/docker-official-images,davidl-zend/official-images,emilevauge/official-images,mattrobenolt/official-images,docker-solr/official-images,pesho/docker-official-images,benbc/docker-official-images,benbc/docker-official-images,emilevauge/official-images,robfrank/official-images,docker-flink/official-images,docker-library/official-images,nodejs-docker-bot/official-images,31z4/official-images,dinogun/official-images,docker-library/official-images,benbc/docker-official-images,nodejs-docker-bot/official-images,docker-solr/official-images,neo-technology/docker-official-images,chorrell/official-images,robfrank/official-images,docker-library/official-images,nodejs-docker-bot/officia
l-images,jperrin/official-images,dinogun/official-images,nodejs-docker-bot/official-images,emilevauge/official-images,docker-flink/official-images,thresheek/official-images,thresheek/official-images,robfrank/official-images,31z4/official-images,docker-solr/official-images,docker-solr/official-images,nodejs-docker-bot/official-images,neo-technology/docker-official-images,davidl-zend/official-images,docker-library/official-images,thresheek/official-images,infosiftr/stackbrew,infosiftr/stackbrew,docker-solr/official-images,emilevauge/official-images,emilevauge/official-images,emilevauge/official-images,chorrell/official-images,thresheek/official-images,31z4/official-images,pesho/docker-official-images,mattrobenolt/official-images,emilevauge/official-images,pesho/docker-official-images,docker-library/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,neo-technology/docker-official-images,docker-library/official-images,robfrank/official-images,infosiftr/stackbrew,davidl-zend/official-images,chorrell/official-images,pesho/docker-official-images,emilevauge/official-images,neo-technology/docker-official-images,31z4/official-images,jperrin/official-images,neo-technology/docker-official-images,robfrank/official-images,benbc/docker-official-images,jperrin/official-images,jperrin/official-images,docker-solr/official-images,thresheek/official-images,docker-library/official-images,thresheek/official-images,davidl-zend/official-images,robfrank/official-images,docker-library/official-images,pesho/docker-official-images,31z4/official-images,emilevauge/official-images,chorrell/official-images,pesho/docker-official-images,docker-library/official-images,thresheek/official-images,mattrobenolt/official-images,neo-technology/docker-official-images,chorrell/official-images,davidl-zend/official-images,nodejs-docker-bot/official-images,dinogun/official-images,emilevauge/official-images,benbc/docker-official-images,docker-solr/official-images,docker-solr/official-images
,emilevauge/official-images,pesho/docker-official-images,robfrank/official-images | ---
+++
@@ -1,6 +1,7 @@
+import curses
+
import zlib
+assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
+
import bz2
-
-assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
- |
edc830bcd8fb594406e314b5c93062b0ec347bba | cref/structure/__init__.py | cref/structure/__init__.py | from peptide import PeptideBuilder
import Bio.PDB
def write_pdb(aa_sequence, fragment_angles, gap_length, filepath):
    """
    Generate pdb file with results

    :param aa_sequence: Amino acid sequence
    :param fragment_angles: Backbone torsion angles as (phi, psi) pairs
    :param gap_length: Length of the gap at the sequence start and end
        (NOTE(review): currently unused by this function)
    :param filepath: Path to the file to save the pdb
    """
    # Split the (phi, psi) pairs into two parallel sequences.
    phi, psi = zip(*fragment_angles)
    structure = PeptideBuilder.make_structure(aa_sequence, phi, psi)
    out = Bio.PDB.PDBIO()
    out.set_structure(structure)
    out.save(filepath)
def rmsd(source, target):
    """Superimpose two PDB structures and return the RMSD between them.

    :param source: Path to the PDB file of the mobile structure
    :param target: Path to the PDB file of the reference structure
    :return: Root-mean-square deviation after optimal superposition
    """
    # Bug fix: Bio.PDB exposes no module-level `parser` attribute; a
    # PDBParser instance must be created explicitly.
    parser = Bio.PDB.PDBParser()
    source = parser.get_structure('source', source)
    target = parser.get_structure('target', target)
    superimposer = Bio.PDB.Superimposer()
    source_atoms = list(source.get_atoms())
    # Truncate so both atom lists are the same length, as required by
    # Superimposer.set_atoms.
    target_atoms = list(target.get_atoms())[:len(source_atoms)]
    superimposer.set_atoms(source_atoms, target_atoms)
    return superimposer.rms
| from peptide import PeptideBuilder
import Bio.PDB
def write_pdb(aa_sequence, fragment_angles, gap_length, filepath):
    """
    Generate pdb file with results

    :param aa_sequence: Amino acid sequence
    :param fragment_angles: Backbone torsion angles as (phi, psi) pairs
    :param gap_length: Length of the gap at the sequence start and end
        (NOTE(review): currently unused by this function)
    :param filepath: Path to the file to save the pdb
    """
    # Split the (phi, psi) pairs into two parallel sequences.
    phi, psi = zip(*fragment_angles)
    structure = PeptideBuilder.make_structure(aa_sequence, phi, psi)
    out = Bio.PDB.PDBIO()
    out.set_structure(structure)
    out.save(filepath)
def rmsd(source, target):
    """Superimpose two PDB files and return the RMSD between them.

    :param source: Path to the PDB file of the mobile structure
    :param target: Path to the PDB file of the reference structure
    :return: Root-mean-square deviation after optimal superposition
    """
    parser = Bio.PDB.PDBParser()
    source = parser.get_structure('source', source)
    target = parser.get_structure('target', target)
    superimposer = Bio.PDB.Superimposer()
    source_atoms = list(source.get_atoms())
    # Truncate so both atom lists are the same length, as required by
    # Superimposer.set_atoms.
    target_atoms = list(target.get_atoms())[:len(source_atoms)]
    superimposer.set_atoms(source_atoms, target_atoms)
    return superimposer.rms
| Fix wrong pdb parser invocation | Fix wrong pdb parser invocation
| Python | mit | mchelem/cref2,mchelem/cref2,mchelem/cref2 | ---
+++
@@ -19,8 +19,9 @@
def rmsd(source, target):
- source = Bio.PDB.parser.get_structure('source', source)
- target = Bio.PDB.parser.get_structure('target', target)
+ parser = Bio.PDB.PDBParser()
+ source = parser.get_structure('source', source)
+ target = parser.get_structure('target', target)
superimposer = Bio.PDB.Superimposer()
source_atoms = list(source.get_atoms()) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.