| commit (stringlengths 40 to 40) | old_file (stringlengths 4 to 150) | new_file (stringlengths 4 to 150) | old_contents (stringlengths 0 to 3.26k) | new_contents (stringlengths 1 to 4.43k) | subject (stringlengths 15 to 501) | message (stringlengths 15 to 4.06k) | lang (stringclasses, 4 values) | license (stringclasses, 13 values) | repos (stringlengths 5 to 91.5k) | diff (stringlengths 0 to 4.35k) |
|---|---|---|---|---|---|---|---|---|---|---|
d06b80227e404bd0ad36e6fd9d382c247e570ca9
|
runtime/Python2/setup.py
|
runtime/Python2/setup.py
|
from setuptools import setup

v = '4.10.1'

setup(
    name='antlr4-python2-runtime',
    version=v,
    url='http://www.antlr.org',
    license='BSD',
    packages=['antlr4', 'antlr4.atn', 'antlr4.dfa', 'antlr4.tree', 'antlr4.error', 'antlr4.xpath'],
    package_dir={'': 'src'},
    author='Eric Vergnaud, Terence Parr, Sam Harwell',
    author_email='eric.vergnaud@wanadoo.fr',
    description=f'ANTLR {v} runtime for Python 2.7.12'
)
|
from setuptools import setup

v = '4.10.1'

setup(
    name='antlr4-python2-runtime',
    version=v,
    url='http://www.antlr.org',
    license='BSD',
    packages=['antlr4', 'antlr4.atn', 'antlr4.dfa', 'antlr4.tree', 'antlr4.error', 'antlr4.xpath'],
    package_dir={'': 'src'},
    author='Eric Vergnaud, Terence Parr, Sam Harwell',
    author_email='eric.vergnaud@wanadoo.fr',
    description='ANTLR %s runtime for Python 2.7.12' % v
)
|
Fix SyntaxError due to F string
|
[py2] Fix SyntaxError due to F string
Signed-off-by: Travis Thieman <f1ef50ba1343ab5680bff0994219d82815f791bd@gmail.com>
|
Python
|
bsd-3-clause
|
parrt/antlr4,parrt/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,antlr/antlr4,parrt/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,parrt/antlr4,antlr/antlr4,parrt/antlr4,antlr/antlr4,parrt/antlr4,ericvergnaud/antlr4,parrt/antlr4,antlr/antlr4,parrt/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,antlr/antlr4,antlr/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,parrt/antlr4,parrt/antlr4
|
---
+++
@@ -10,5 +10,5 @@
package_dir={'': 'src'},
author='Eric Vergnaud, Terence Parr, Sam Harwell',
author_email='eric.vergnaud@wanadoo.fr',
- description=f'ANTLR {v} runtime for Python 2.7.12'
+ description='ANTLR %s runtime for Python 2.7.12' % v
)
|
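For context, a minimal sketch (not part of the dataset) of the incompatibility behind this record: an f-string is a SyntaxError on any Python 2 interpreter, while `%`-formatting parses on both Python 2 and 3; the version string below is copied from the record above.

```python
# Illustration only: %-formatting keeps the setup module importable under Python 2,
# whereas the original f-string would fail to parse before setup() ever ran.
v = '4.10.1'
description = 'ANTLR %s runtime for Python 2.7.12' % v
print(description)  # ANTLR 4.10.1 runtime for Python 2.7.12
```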
c86e22a16eb2c1f2c95f81c232ae8535e447e935
|
solutions/pybasic_ex1_3_1.py
|
solutions/pybasic_ex1_3_1.py
|
# Use the codon variables you defined previously
S = "TCT"
L = "CTT"
Y = "TAT"
C = "TGT"
# Create a list for the protein sequence CLYSY
codons = [C, L, Y, S, Y]
# Print the DNA sequence of the protein
print("DNA sequence:", codons)
# Print the DNA sequence of the last amino acid
print("Last codon:", codons[-1])
# Create two more variables containing the DNA sequence for a stop codon and a start codon
start = "ATG"
stop = "TGA"
# Replace the first element of the list with the start codon
codons[0] = start
# Append the stop codon to the end of the list
codons.append(stop)
# Print the resulting DNA sequence
print("DNA sequence after alteration:", "".join(codons))
|
# Use the codon variables you defined previously
S = "TCT"
L = "CTT"
Y = "TAT"
C = "TGT"
# Create a list for the protein sequence CLYSY
codons = [C, L, Y, S, Y]
# Print the DNA sequence of the protein
print("DNA sequence:", codons)
# Print the DNA sequence of the last amino acid
print("Last codon:", codons[-1])
# Create two more variables containing the DNA sequence for a stop codon and a start codon
start = "ATG"
stop = "TGA"
# Replace the first element of the list with the start codon
codons[0] = start
# Append the stop codon to the end of the list
codons.append(stop)
# Print the resulting DNA sequence
print("DNA sequence after alteration:", codons)
|
Remove join in exercise 1.3.1 not seen yet in course
|
Remove join in exercise 1.3.1 not seen yet in course
|
Python
|
unlicense
|
pycam/python-basic,pycam/python-basic
|
---
+++
@@ -24,4 +24,4 @@
codons.append(stop)
# Print the resulting DNA sequence
-print("DNA sequence after alteration:", "".join(codons))
+print("DNA sequence after alteration:", codons)
|
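For reference, a small illustrative snippet (not from the dataset; the codon values are hypothetical) showing how the two print styles in this exercise differ:

```python
# Illustration only: printing the joined string versus the list itself.
codons = ["ATG", "CTT", "TAT", "TCT", "TAT", "TGA"]
print("DNA sequence after alteration:", "".join(codons))  # ATGCTTTATTCTTATTGA
print("DNA sequence after alteration:", codons)           # ['ATG', 'CTT', ...]
```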
8dc69dca8538eb992989da396b65ade4fe2e5088
|
polls/models.py
|
polls/models.py
|
from django.db import models
from django.utils import timezone
from datetime import timedelta


class Poll(models.Model):
    text = models.CharField(max_length=200)
    created_ts = models.DateTimeField()
    updated_ts = models.DateTimeField(null=True, default=None)
    is_published = models.BooleanField(default=False)
    publication_date = models.DateTimeField(
        'date published',
        default=None,
    )

    def __str__(self):
        return self.text

    def was_published_recently(self):
        return self.publication_date >= timezone.now() - timedelta(days=1)

    def save(self, *args, **kwargs):
        ''' On save, update timestamps '''
        if not self.id:
            self.created_ts = timezone.now()
        self.updated_ts = timezone.now()
        return super(Poll, self).save(*args, **kwargs)


class Choice(models.Model):
    poll = models.ForeignKey(Poll, on_delete=models.CASCADE)
    text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)

    def __str__(self):
        return self.text
|
from django.db import models
from django.utils import timezone
from datetime import timedelta


class Poll(models.Model):
    text = models.CharField(max_length=200)
    created_ts = models.DateTimeField()
    updated_ts = models.DateTimeField(null=True, default=None)
    is_published = models.BooleanField(default=False)
    publication_date = models.DateTimeField(
        'date published',
        default=None,
    )

    def __str__(self):
        return self.text

    def was_published_recently(self):
        now = timezone.now()
        return now - timedelta(days=1) <= self.publication_date <= now

    def save(self, *args, **kwargs):
        ''' On save, update timestamps '''
        if not self.id:
            self.created_ts = timezone.now()
        self.updated_ts = timezone.now()
        return super(Poll, self).save(*args, **kwargs)


class Choice(models.Model):
    poll = models.ForeignKey(Poll, on_delete=models.CASCADE)
    text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)

    def __str__(self):
        return self.text
|
Fix was_published_recently reporting polls from the future
|
Fix was_published_recently reporting polls from the future
|
Python
|
mit
|
fernandocanizo/django-poll-site,fernandocanizo/django-poll-site,fernandocanizo/django-poll-site
|
---
+++
@@ -17,7 +17,8 @@
return self.text
def was_published_recently(self):
- return self.publication_date >= timezone.now() - timedelta(days=1)
+ now = timezone.now()
+ return now - timedelta(days=1) <= self.publication_date <= now
def save(self, *args, **kwargs):
''' On save, update timestamps '''
|
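As a brief illustration (a standalone sketch, not the repository's code), the chained comparison rejects future publication dates as well as old ones:

```python
# Hypothetical standalone version of the fixed check, outside Django.
from datetime import datetime, timedelta

def was_published_recently(publication_date, now):
    return now - timedelta(days=1) <= publication_date <= now

now = datetime(2023, 1, 2, 12, 0)
print(was_published_recently(now - timedelta(hours=3), now))  # True: within the last day
print(was_published_recently(now + timedelta(days=2), now))   # False: future date
print(was_published_recently(now - timedelta(days=5), now))   # False: too old
```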
c898d3f3d142727d0a55303238cda8044d729437
|
motobot/core_plugins/commands.py
|
motobot/core_plugins/commands.py
|
from motobot import command, Notice, split_response, IRCBot


@command('commands')
def commands_command(bot, database, context, message, args):
    userlevel = bot.get_userlevel(context.channel, context.nick)
    valid_command = lambda plugin: plugin.type == IRCBot.command_plugin \
        and plugin.level <= userlevel and not plugin.arg.hidden
    key = lambda plugin: (plugin.arg.trigger, plugin.func)

    command_groups = {}
    for command, func in map(key, filter(valid_command, bot.plugins)):
        value = command_groups.get(func, [])
        value.append(command)
        command_groups[func] = value

    format_group = lambda group: '({})'.format(', '.join(group)) \
        if len(group) != 1 else group[0]
    commands = map(format_group, sorted(command_groups.values(), key=lambda x: x[0]))

    response = split_response(commands, "Bot Commands: {};")
    return response, Notice(context.nick)
|
from motobot import command, Notice, split_response, IRCBot
from collections import defaultdict


def filter_plugins(plugins, userlevel):
    return map(
        lambda plugin: (plugin.arg.trigger, plugin.func), filter(
            lambda plugin: plugin.type == IRCBot.command_plugin and
            plugin.level <= userlevel and not plugin.arg.hidden,
            plugins
        )
    )


def format_group(group):
    return '({})'.format(', '.join(group)) if len(group) != 1 else group[0]


@command('commands')
def commands_command(bot, database, context, message, args):
    userlevel = bot.get_userlevel(context.channel, context.nick)
    groups = defaultdict(lambda: [])

    for command, func in filter_plugins(bot.plugins, userlevel):
        groups[func].append(command)

    commands = map(format_group, sorted(groups.values(), key=lambda x: x[0]))
    response = split_response(commands, "Bot Commands: {};")
    return response, Notice(context.nick)
|
Revert "Revert "Cleans up split_response""
|
Revert "Revert "Cleans up split_response""
This reverts commit c3c62feb9fbd8b7ff35d70eaaa5fecfb2093dbb0.
|
Python
|
mit
|
Motoko11/MotoBot
|
---
+++
@@ -1,23 +1,30 @@
from motobot import command, Notice, split_response, IRCBot
+from collections import defaultdict
+
+
+def filter_plugins(plugins, userlevel):
+ return map(
+ lambda plugin: (plugin.arg.trigger, plugin.func), filter(
+ lambda plugin: plugin.type == IRCBot.command_plugin and
+ plugin.level <= userlevel and not plugin.arg.hidden,
+ plugins
+ )
+ )
+
+
+def format_group(group):
+ return '({})'.format(', '.join(group)) if len(group) != 1 else group[0]
@command('commands')
def commands_command(bot, database, context, message, args):
userlevel = bot.get_userlevel(context.channel, context.nick)
+ groups = defaultdict(lambda: [])
- valid_command = lambda plugin: plugin.type == IRCBot.command_plugin \
- and plugin.level <= userlevel and not plugin.arg.hidden
- key = lambda plugin: (plugin.arg.trigger, plugin.func)
+ for command, func in filter_plugins(bot.plugins, userlevel):
+ groups[func].append(command)
- command_groups = {}
- for command, func in map(key, filter(valid_command, bot.plugins)):
- value = command_groups.get(func, [])
- value.append(command)
- command_groups[func] = value
-
- format_group = lambda group: '({})'.format(', '.join(group)) \
- if len(group) != 1 else group[0]
- commands = map(format_group, sorted(command_groups.values(), key=lambda x: x[0]))
+ commands = map(format_group, sorted(groups.values(), key=lambda x: x[0]))
response = split_response(commands, "Bot Commands: {};")
return response, Notice(context.nick)
|
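As an aside, a minimal sketch (not from the repository; the trigger and handler names are made up) of the `defaultdict` grouping pattern this refactor relies on:

```python
# Illustration only: grouping command triggers by handler function.
from collections import defaultdict

pairs = [('help', 'show_help'), ('h', 'show_help'), ('quit', 'do_quit')]
groups = defaultdict(list)  # behaves like defaultdict(lambda: [])
for trigger, func in pairs:
    groups[func].append(trigger)
print(dict(groups))  # {'show_help': ['help', 'h'], 'do_quit': ['quit']}
```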
7e78408dad1aab6bb42fd62601ee52e5f0ab3bd9
|
stanczyk/proxy.py
|
stanczyk/proxy.py
|
from twisted.internet import endpoints, reactor
from txampext.multiplexing import ProxyingFactory


def connectProxy(namespace, identifier, _reactor=reactor):
    """Start listening on some free local port; connections will be
    proxied to the virtual server with the given identifier.
    """
    remote = namespace.get("remote")
    if remote is None:
        raise RuntimeError("You are not connected to the exercise server. "
                           "Call ``connect``.")

    factory = ProxyingFactory(remote, identifier)
    endpoint = endpoints.TCP4ServerEndpoint(_reactor, 0, interface="localhost")
    d = endpoint.listen(factory)
    d.addCallback(_listening, namespace, identifier)
    return None


def _listening(listeningPort, namespace, identifier):
    """Started listening; report success to terminal.
    """
    host = listeningPort.getHost()
    template = "{id} is now listening on {h.host}:{h.port}"
    namespace["manhole"].writeLine(template.format(h=host, id=identifier))
|
from stanczyk.util import _getRemote
from twisted.internet import endpoints, reactor
from txampext.multiplexing import ProxyingFactory


def connectProxy(namespace, identifier, _reactor=reactor):
    """Start listening on some free local port; connections will be
    proxied to the virtual server with the given identifier.
    """
    endpoint = endpoints.TCP4ServerEndpoint(_reactor, 0, interface="localhost")
    factory = ProxyingFactory(_getRemote(namespace), identifier)
    d = endpoint.listen(factory)
    d.addCallback(_listening, namespace, identifier)
    return None


def _listening(listeningPort, namespace, identifier):
    """Started listening; report success to terminal.
    """
    host = listeningPort.getHost()
    template = "{id} is now listening on {h.host}:{h.port}"
    namespace["manhole"].writeLine(template.format(h=host, id=identifier))
|
Use the new fancy refactored remote logic
|
Use the new fancy refactored remote logic
|
Python
|
isc
|
crypto101/stanczyk
|
---
+++
@@ -1,3 +1,4 @@
+from stanczyk.util import _getRemote
from twisted.internet import endpoints, reactor
from txampext.multiplexing import ProxyingFactory
@@ -7,13 +8,8 @@
proxied to the virtual server with the given identifier.
"""
- remote = namespace.get("remote")
- if remote is None:
- raise RuntimeError("You are not connected to the exercise server. "
- "Call ``connect``.")
-
- factory = ProxyingFactory(remote, identifier)
endpoint = endpoints.TCP4ServerEndpoint(_reactor, 0, interface="localhost")
+ factory = ProxyingFactory(_getRemote(namespace), identifier)
d = endpoint.listen(factory)
d.addCallback(_listening, namespace, identifier)
return None
|
7a582488a3f8d86820dca7c3b44ff86b8dbe4412
|
changes/__init__.py
|
changes/__init__.py
|
import os
import subprocess

try:
    VERSION = __import__('pkg_resources') \
        .get_distribution('changes').version
except Exception, e:
    VERSION = 'unknown'


def _get_git_revision(path):
    try:
        r = subprocess.check_output('git rev-parse HEAD', cwd=path, shell=True)
    except Exception:
        return None
    return r.strip()


def get_revision():
    """
    :returns: Revision number of this branch/checkout, if available. None if
              no revision number can be determined.
    """
    package_dir = os.path.dirname(__file__)
    checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir))
    path = os.path.join(checkout_dir, '.git')
    if os.path.exists(path):
        return _get_git_revision(path)
    return None


def get_version():
    base = VERSION
    if __build__:
        base = '%s (%s)' % (base, __build__)
    return base


__build__ = get_revision()
__docformat__ = 'restructuredtext en'
|
import os
import subprocess

try:
    VERSION = __import__('pkg_resources') \
        .get_distribution('changes').version
except Exception:
    VERSION = 'unknown'


def _get_git_revision(path):
    try:
        r = subprocess.check_output('git rev-parse HEAD', cwd=path, shell=True)
    except Exception:
        return None
    return r.strip()


def get_revision():
    """
    :returns: Revision number of this branch/checkout, if available. None if
              no revision number can be determined.
    """
    package_dir = os.path.dirname(__file__)
    checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir))
    path = os.path.join(checkout_dir, '.git')
    if os.path.exists(path):
        return _get_git_revision(path)
    return None


def get_version():
    base = VERSION
    if __build__:
        base = '%s (%s)' % (base, __build__)
    return base


__build__ = get_revision()
__docformat__ = 'restructuredtext en'
|
Update exception syntax to be py3 compat
|
Update exception syntax to be py3 compat
|
Python
|
apache-2.0
|
bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes
|
---
+++
@@ -4,7 +4,7 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution('changes').version
-except Exception, e:
+except Exception:
VERSION = 'unknown'
|
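For context, a short sketch (not part of the dataset) contrasting the two exception-handling syntaxes: `except Exception, e` only parses on Python 2, while `except Exception as e` works on both Python 2 and 3.

```python
# Illustration only: the `as` form is the portable way to bind the exception,
# and dropping the binding entirely (as in the commit) also parses everywhere.
try:
    raise ValueError('boom')
except Exception as e:
    print('caught: %s' % e)
```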
f5613b2b03f20f9d8f2a8d221ba1fae86664839c
|
modules/mpi-ring/bin/onramp_status.py
|
modules/mpi-ring/bin/onramp_status.py
|
#!/usr/bin/env python
#
# Curriculum Module Status Script
# - Run while the job is running
# - Run -outside- of the allocation
# - onramp_run_params.ini file is available in current working directory
#
import sys
import re

#
# Display any special message you want the user to see, or leave blank if nothing.
# Please restrict status messages to 1 line of text.
#

# Read in the output file
lines = [line.rstrip('\n') for line in open('onramp/output.txt')]

# If the file is empty then nothing to do
if len(lines) <= 0:
    sys.exit(0)

#print "Status) Number of Lines: %d" % len(lines)

# Find the last line of 'Increment value'
last_status = None
for line in lines:
    searchObj = re.search( r'Increment value (.*)', line)
    if searchObj:
        last_status = searchObj.group(1).strip()

if last_status is not None:
    print "%s" % last_status

#
# Exit successfully
#
sys.exit(0)
|
#!/usr/bin/env python
#
# Curriculum Module Status Script
# - Run while the job is running
# - Run -outside- of the allocation
# - onramp_run_params.ini file is available in current working directory
#
import sys
import re

#
# Display any special message you want the user to see, or leave blank if nothing.
# Please restrict status messages to 1 line of text.
#

# Read in the output file
lines = [line.rstrip('\n') for line in open('output.txt')]

# If the file is empty then nothing to do
if len(lines) <= 0:
    sys.exit(0)

#print "Status) Number of Lines: %d" % len(lines)

# Find the last line of 'Increment value'
last_status = None
for line in lines:
    searchObj = re.search( r'Increment value (.*)', line)
    if searchObj:
        last_status = searchObj.group(1).strip()

if last_status is not None:
    print "%s" % last_status

#
# Exit successfully
#
sys.exit(0)
|
Update the status.py to look for the output.txt in the new location
|
Update the status.py to look for the output.txt in the new location
|
Python
|
bsd-3-clause
|
OnRampOrg/onramp,koepked/onramp,OnRampOrg/onramp,ssfoley/onramp,OnRampOrg/onramp,koepked/onramp,ssfoley/onramp,koepked/onramp,OnRampOrg/onramp,koepked/onramp,ssfoley/onramp,ssfoley/onramp,OnRampOrg/onramp,koepked/onramp,OnRampOrg/onramp,OnRampOrg/onramp,koepked/onramp
|
---
+++
@@ -15,7 +15,7 @@
#
# Read in the output file
-lines = [line.rstrip('\n') for line in open('onramp/output.txt')]
+lines = [line.rstrip('\n') for line in open('output.txt')]
# If the file is empty then nothing to do
if len(lines) <= 0:
|
00b798c309d8807a562efb31751e82e5149ac7c8
|
molo/core/api/tests/test_importers.py
|
molo/core/api/tests/test_importers.py
|
"""
Test the importing module.
This module relies heavily on an external service and requires
quite a bit of mocking.
"""
import json
from django.test import TestCase
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.api import importers
from molo.core.api.tests import constants
class ArticleImportTestCase(MoloTestCaseMixin, TestCase):

    def setUp(self):
        self.mk_main()

    def test_importer_initializtion(self):
        content = json.dumps(constants.AVAILABLE_ARTICLES)
        importer = importers.ArticlePageImporter(content=content)
|
"""
Test the importing module.
This module relies heavily on an external service and requires
quite a bit of mocking.
"""
import json
from django.test import TestCase
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.api import importers
from molo.core.api.tests import constants
class ArticleImportTestCase(MoloTestCaseMixin, TestCase):

    def setUp(self):
        self.mk_main()

    def test_importer_initializtion(self):
        content = json.dumps(constants.AVAILABLE_ARTICLES)
        importer = importers.ArticlePageImporter(content=content)

        self.assertEqual(importer.articles(), content["items"])
|
Write test for importer initialisation
|
Write test for importer initialisation
|
Python
|
bsd-2-clause
|
praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo
|
---
+++
@@ -20,3 +20,5 @@
def test_importer_initializtion(self):
content = json.dumps(constants.AVAILABLE_ARTICLES)
importer = importers.ArticlePageImporter(content=content)
+
+ self.assertEqual(importer.articles(), content["items"])
|
190b4b193a2b33d7904310d24891e8aec18a126f
|
pipreq/cli.py
|
pipreq/cli.py
|
import argparse
import sys

from pipreq.command import Command


def create_parser():
    parser = argparse.ArgumentParser(
        description='Manage Python package requirements across multiple environments using '
                    'per-environment requirements files.')
    parser.add_argument('-g', '--generate', action='store_true', default=False,
                        help='Generate requirements files')
    parser.add_argument('-c', '--create', action='store_true', default=False,
                        help='Create or update rc file (requires list of packages)')
    parser.add_argument('-U', '--upgrade', action='store_true', default=False,
                        help='Upgrade packages (requires list of packages)')
    parser.add_argument('packages', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
    return parser


def verify_args(args):
    if not args.create and not args.generate and not args.upgrade:
        return u'Must specify generate (-g) or create/upgrade (-[cu]) with packages'
    return None


def error(parser, message):
    parser.print_help()
    parser.exit(message="\nERROR: %s\n" % message)


def main():
    try:
        parser = create_parser()
        parsed_args = parser.parse_args()
        error_message = verify_args(parsed_args)
        if error_message:
            error(parser, error_message)
        command = Command(parsed_args, ".requirementsrc")
        command.run()
    except KeyboardInterrupt:
        sys.exit()
|
import argparse
import sys

from pipreq.command import Command


def create_parser():
    parser = argparse.ArgumentParser(
        description='Manage Python package requirements across multiple environments using '
                    'per-environment requirements files.')
    parser.add_argument('-g', '--generate', action='store_true', default=False,
                        help='Generate requirements files')
    parser.add_argument('-c', '--create', action='store_true', default=False,
                        help='Create or update rc file (requires list of packages)')
    parser.add_argument('-U', '--upgrade', action='store_true', default=False,
                        help='Upgrade packages (requires list of packages)')
    parser.add_argument('packages', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
    return parser


def verify_args(args):
    if not args.create and not args.generate and not args.upgrade:
        return 'Must specify generate (-g) or create/upgrade (-[cu]) with packages'
    return None


def error(parser, message):
    parser.print_help()
    parser.exit(message="\nERROR: %s\n" % message)


def main():
    try:
        parser = create_parser()
        parsed_args = parser.parse_args()
        error_message = verify_args(parsed_args)
        if error_message:
            error(parser, error_message)
        command = Command(parsed_args, ".requirementsrc")
        command.run()
    except KeyboardInterrupt:
        sys.exit()
|
Remove unnecessary u on string
|
Remove unnecessary u on string
|
Python
|
mit
|
jessamynsmith/pipwrap,jessamynsmith/pipreq,jessamynsmith/pipwrap,jessamynsmith/pipreq
|
---
+++
@@ -22,7 +22,7 @@
def verify_args(args):
if not args.create and not args.generate and not args.upgrade:
- return u'Must specify generate (-g) or create/upgrade (-[cu]) with packages'
+ return 'Must specify generate (-g) or create/upgrade (-[cu]) with packages'
return None
|
cb6f11ad05ef07facf651f8fbccae9e86e0a77c8
|
processing.py
|
processing.py
|
#!/usr/bin/env python
"""
Processing routines for the waveFlapper case.
"""
import foampy
import numpy as np
import matplotlib.pyplot as plt

width_2d = 0.1
width_3d = 3.66


def plot_force():
    """Plots the streamwise force on the paddle over time."""


def plot_moment():
    data = foampy.load_forces_moments()
    i = 10
    t = data["time"][i:]
    m = data["moment"]["pressure"]["z"] + data["moment"]["viscous"]["z"]
    m = m[i:]*width_3d/width_2d
    plt.figure()
    plt.plot(t, m)
    plt.xlabel("t (s)")
    plt.ylabel("Flapper moment (Nm)")
    print("Max moment from CFD =", m.max(), "Nm")
    print("Theoretical max moment (including inertia) =", 5500*3.3, "Nm")
    plt.show()


if __name__ == "__main__":
    plot_moment()
|
#!/usr/bin/env python
"""
Processing routines for the waveFlapper case.
"""
import foampy
import numpy as np
import matplotlib.pyplot as plt

width_2d = 0.1
width_3d = 3.66
m_paddle = 1270.0 # Paddle mass in kg, from OMB manual
h_piston = 3.3147
I_paddle = 1/3*m_paddle*h_piston**2


def plot_force():
    """Plots the streamwise force on the paddle over time."""


def plot_moment():
    data = foampy.load_forces_moments()
    i = 10
    t = data["time"][i:]
    m = data["moment"]["pressure"]["z"] + data["moment"]["viscous"]["z"]
    m = m[i:]*width_3d/width_2d
    period = 2.2
    omega = 2*np.pi/period
    theta = 0.048*np.sin(omega*t)
    theta_doubledot = -0.048*omega**2*np.sin(omega*t)
    m_inertial = I_paddle*theta_doubledot
    m += m_inertial
    plt.figure()
    plt.plot(t, m)
    plt.xlabel("t (s)")
    plt.ylabel("Flapper moment (Nm)")
    print("Max moment from CFD =", m.max(), "Nm")
    print("Theoretical max moment (including inertia) =", 5500*3.3, "Nm")
    plt.show()


if __name__ == "__main__":
    plot_moment()
|
Add paddle inertia to calculations
|
Add paddle inertia to calculations
|
Python
|
cc0-1.0
|
petebachant/waveFlapper-OpenFOAM,petebachant/waveFlapper-OpenFOAM,petebachant/waveFlapper-OpenFOAM
|
---
+++
@@ -10,6 +10,9 @@
width_2d = 0.1
width_3d = 3.66
+m_paddle = 1270.0 # Paddle mass in kg, from OMB manual
+h_piston = 3.3147
+I_paddle = 1/3*m_paddle*h_piston**2
def plot_force():
"""Plots the streamwise force on the paddle over time."""
@@ -20,6 +23,12 @@
t = data["time"][i:]
m = data["moment"]["pressure"]["z"] + data["moment"]["viscous"]["z"]
m = m[i:]*width_3d/width_2d
+ period = 2.2
+ omega = 2*np.pi/period
+ theta = 0.048*np.sin(omega*t)
+ theta_doubledot = -0.048*omega**2*np.sin(omega*t)
+ m_inertial = I_paddle*theta_doubledot
+ m += m_inertial
plt.figure()
plt.plot(t, m)
plt.xlabel("t (s)")
|
db977f65a6f986508c826b645b9c94e5eff4f83f
|
oidc_provider/management/commands/creatersakey.py
|
oidc_provider/management/commands/creatersakey.py
|
from Crypto.PublicKey import RSA

from django.conf import settings
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'Randomly generate a new RSA key for the OpenID server'

    def handle(self, *args, **options):
        try:
            key = RSA.generate(1024)
            file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem'
            with open(file_path, 'w') as f:
                f.write(key.exportKey('PEM'))
            self.stdout.write('RSA key successfully created at: ' + file_path)
        except Exception as e:
            self.stdout.write('Something goes wrong: {0}'.format(e))
|
from Crypto.PublicKey import RSA

from django.conf import settings
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'Randomly generate a new RSA key for the OpenID server'

    def handle(self, *args, **options):
        try:
            key = RSA.generate(1024)
            file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem'
            with open(file_path, 'wb') as f:
                f.write(key.exportKey('PEM'))
            self.stdout.write('RSA key successfully created at: ' + file_path)
        except Exception as e:
            self.stdout.write('Something goes wrong: {0}'.format(e))
|
Append binary file mode to write RSA exported key needed by Python 3
|
Append binary file mode to write RSA exported key needed by Python 3
|
Python
|
mit
|
ByteInternet/django-oidc-provider,bunnyinc/django-oidc-provider,wayward710/django-oidc-provider,ByteInternet/django-oidc-provider,wojtek-fliposports/django-oidc-provider,wayward710/django-oidc-provider,juanifioren/django-oidc-provider,bunnyinc/django-oidc-provider,torreco/django-oidc-provider,wojtek-fliposports/django-oidc-provider,juanifioren/django-oidc-provider,torreco/django-oidc-provider
|
---
+++
@@ -11,7 +11,7 @@
try:
key = RSA.generate(1024)
file_path = settings.BASE_DIR + '/OIDC_RSA_KEY.pem'
- with open(file_path, 'w') as f:
+ with open(file_path, 'wb') as f:
f.write(key.exportKey('PEM'))
self.stdout.write('RSA key successfully created at: ' + file_path)
except Exception as e:
|
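A minimal sketch (not from the repository; the file name and key material are placeholders) of why binary mode matters here: the commit indicates the exported key arrives as bytes under Python 3, and a file opened in text mode rejects bytes.

```python
# Illustration only: on Python 3, writing bytes to a file opened with 'w'
# raises TypeError, so byte strings such as exported PEM data need 'wb'.
pem_bytes = b'-----BEGIN RSA PRIVATE KEY-----\n...\n-----END RSA PRIVATE KEY-----\n'
with open('example_key.pem', 'wb') as f:
    f.write(pem_bytes)
```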
90bc04a92bbe6f29d1487fbd87a4fad811f22c93
|
setup/setup-test-docs.py
|
setup/setup-test-docs.py
|
#!/usr/bin/python
#
# SCRIPT FOR POPULATING TEST SOLR SERVER CORE WITH TEST DOCUMENTS
#
# Usage: python setup-test-docs.py <Solr Endpoint Url>
#
# Solr endpoint URL should be in the form:
# https://example.com/solr/<core-name>/
#
# .txt files in the directory ./txt/ will be committed to user-provided Solr
# core matching the name <core-name>.
import os
from os import listdir
from os.path import isfile, join
import json
import sys

arguments = sys.argv
solrApiUrl = arguments[1]

filePaths = [f for f in listdir('txt') if isfile(join('txt', f))]

TEMPLATE = """
{
  "add": {
    "doc":
      {"title":"%s", "body": %s},
    "boost":1.0,
    "overwrite":true,
    "commitWithin":1000
  }
}
"""

headers = {'Content-type': 'application/json'}

for i, path in enumerate(filePaths):
    print str(i) + '\tProcessing ' + path
    f = open('txt/' + path)
    text = f.read()
    commandJson = TEMPLATE % (path.replace('.txt', ''), json.dumps(text))
    os.system("curl " + solrApiUrl + "update?commit=true -H 'Content-type:application/json' -d '%s'" % commandJson)

print '\nDone.\n----------------------------------'
|
#!/usr/bin/python
#
# SCRIPT FOR POPULATING TEST SOLR SERVER CORE WITH TEST DOCUMENTS
#
# Usage: python setup-test-docs.py <Solr Endpoint Url>
#
# Solr endpoint URL should be in the form:
# https://example.com/solr/<core-name>/
#
# .txt files in the directory ./txt/ will be committed to user-provided Solr
# core matching the name <core-name>.
import os
from os import listdir
from os.path import isfile, join
import json
import sys

TEST_DOC_DIR = 'test_docs'

arguments = sys.argv
solrApiUrl = arguments[1]

filePaths = [f for f in listdir(TEST_DOC_DIR) if isfile(join(TEST_DOC_DIR, f))]

TEMPLATE = """
{
  "add": {
    "doc":
      {"title":"%s", "body": %s},
    "boost":1.0,
    "overwrite":true,
    "commitWithin":1000
  }
}
"""

headers = {'Content-type': 'application/json'}

for i, path in enumerate(filePaths):
    print str(i) + '\tProcessing ' + path
    f = open(TEST_DOC_DIR + '/' + path)
    text = f.read()
    commandJson = TEMPLATE % (path.replace('.txt', ''), json.dumps(text))
    os.system("curl " + solrApiUrl + "update?commit=true -H 'Content-type:application/json' -d '%s'" % commandJson)

print '\nDone.\n----------------------------------'
|
Use test_docs as directory for test documents for solr server
|
Use test_docs as directory for test documents for solr server
|
Python
|
mit
|
gios-asu/search-api
|
---
+++
@@ -16,11 +16,13 @@
import json
import sys
+TEST_DOC_DIR = 'test_docs'
+
arguments = sys.argv
solrApiUrl = arguments[1]
-filePaths = [f for f in listdir('txt') if isfile(join('txt', f))]
+filePaths = [f for f in listdir(TEST_DOC_DIR) if isfile(join(TEST_DOC_DIR, f))]
TEMPLATE = """
{
@@ -38,7 +40,7 @@
for i, path in enumerate(filePaths):
print str(i) + '\tProcessing ' + path
- f = open('txt/' + path)
+ f = open(TEST_DOC_DIR + '/' + path)
text = f.read()
commandJson = TEMPLATE % (path.replace('.txt', ''), json.dumps(text))
|
f34de068e71c57b434c48c9c2b90471112bb4a2b
|
common/djangoapps/util/bad_request_rate_limiter.py
|
common/djangoapps/util/bad_request_rate_limiter.py
|
"""
A utility class which wraps the RateLimitMixin 3rd party class to do bad request counting
which can be used for rate limiting
"""
from ratelimitbackend.backends import RateLimitMixin
from django.conf import settings
if settings.FEATURES.get('EDRAAK_RATELIMIT_APP', False):
    from edraak_ratelimit.backends import EdraakRateLimitMixin
    RateLimitMixin = EdraakRateLimitMixin


class BadRequestRateLimiter(RateLimitMixin):
    """
    Use the 3rd party RateLimitMixin to help do rate limiting on the Password Reset flows
    """

    def is_rate_limit_exceeded(self, request):
        """
        Returns if the client has been rated limited
        """
        counts = self.get_counters(request)
        is_exceeded = sum(counts.values()) >= self.requests
        if is_exceeded:
            self.db_log_failed_attempt(request)

        return is_exceeded

    def tick_bad_request_counter(self, request):
        """
        Ticks any counters used to compute when rate limt has been reached
        """
        self.cache_incr(self.get_cache_key(request))
|
"""
A utility class which wraps the RateLimitMixin 3rd party class to do bad request counting
which can be used for rate limiting
"""
from ratelimitbackend.backends import RateLimitMixin
from django.conf import settings
if settings.FEATURES.get('EDRAAK_RATELIMIT_APP', False):
    from edraak_ratelimit.backends import EdraakRateLimitMixin
    RateLimitMixin = EdraakRateLimitMixin


class BadRequestRateLimiter(RateLimitMixin):
    """
    Use the 3rd party RateLimitMixin to help do rate limiting on the Password Reset flows
    """

    def is_rate_limit_exceeded(self, request):
        """
        Returns if the client has been rated limited
        """
        counts = self.get_counters(request)
        is_exceeded = sum(counts.values()) >= self.requests
        if is_exceeded and settings.FEATURES.get('EDRAAK_RATELIMIT_APP', False):
            self.db_log_failed_attempt(request)

        return is_exceeded

    def tick_bad_request_counter(self, request):
        """
        Ticks any counters used to compute when rate limt has been reached
        """
        self.cache_incr(self.get_cache_key(request))
|
Fix object has no db_log_failed_attempt
|
Fix object has no db_log_failed_attempt
|
Python
|
agpl-3.0
|
Edraak/edraak-platform,Edraak/edraak-platform,Edraak/edraak-platform,Edraak/edraak-platform
|
---
+++
@@ -22,7 +22,7 @@
counts = self.get_counters(request)
is_exceeded = sum(counts.values()) >= self.requests
- if is_exceeded:
+ if is_exceeded and settings.FEATURES.get('EDRAAK_RATELIMIT_APP', False):
self.db_log_failed_attempt(request)
return is_exceeded
|
35201e71037d544893a59bfda8c4538fcb6fb4b7
|
api/tests/test_scrape_item.py
|
api/tests/test_scrape_item.py
|
from api.scrapers.item import scrape_item_by_id
from api import app
from flask.json import loads
import unittest

app.config['TESTING'] = True


class ScrapeItem(unittest.TestCase):

    def test_scrape_item_by_id(self):
        item = scrape_item_by_id('d19447e548d')
        self.assertEqual('d19447e548d', item.lodestone_id)
        self.assertEqual('Thyrus Zenith', item.name)
        self.assertEqual('Two-handed Conjurer\'s Arm', item.type)
        self.assertEqual(90, item.ilvl)

    def test_item_json(self):
        with app.test_client() as client:
            response = client.get('/scrape/item/cada9ec7074')
            self.assertEqual(response.status_code, 200)
            json = loads(response.data)
            self.assertEqual(json, {
                "id": "cada9ec7074",
                "ilvl": 110,
                "name": "Arachne Robe",
                "type": "Body"
            })

    def test_invalid_lodestone_id(self):
        with app.test_client() as client:
            response = client.post('/scrape/item/23fh032hf0oi1so3a012r1')
            self.assertEqual(response.status_code, 405)
|
from api.scrapers.item import scrape_item_by_id
from api import app, db
from flask.json import loads
import unittest

app.config['TESTING'] = True
db.create_all()


class ScrapeItem(unittest.TestCase):

    def test_scrape_item_by_id(self):
        item = scrape_item_by_id('d19447e548d')
        self.assertEqual('d19447e548d', item.lodestone_id)
        self.assertEqual('Thyrus Zenith', item.name)
        self.assertEqual('Two-handed Conjurer\'s Arm', item.type)
        self.assertEqual(90, item.ilvl)

    def test_item_json(self):
        with app.test_client() as client:
            response = client.get('/scrape/item/cada9ec7074')
            self.assertEqual(response.status_code, 200)
            json = loads(response.data)
            self.assertEqual(json, {
                "id": "cada9ec7074",
                "ilvl": 110,
                "name": "Arachne Robe",
                "type": "Body"
            })

    def test_invalid_lodestone_id(self):
        with app.test_client() as client:
            response = client.post('/scrape/item/23fh032hf0oi1so3a012r1')
            self.assertEqual(response.status_code, 405)
|
Create tables in database before running tests
|
Create tables in database before running tests
|
Python
|
mit
|
Demotivated/loadstone
|
---
+++
@@ -1,11 +1,12 @@
from api.scrapers.item import scrape_item_by_id
-from api import app
+from api import app, db
from flask.json import loads
import unittest
app.config['TESTING'] = True
+db.create_all()
class ScrapeItem(unittest.TestCase):
|
3e7d83d51fa43f8e93ad548b07193f13791f8abe
|
django_lightweight_queue/middleware/transaction.py
|
django_lightweight_queue/middleware/transaction.py
|
from django.db import transaction, connection


class TransactionMiddleware(object):
    def process_job(self, job):
        if not connection.in_atomic_block:
            transaction.set_autocommit(False)

    def process_result(self, job, result, duration):
        if not connection.in_atomic_block:
            transaction.commit()

    def process_exception(self, job, time_taken, *exc_info):
        if not connection.in_atomic_block:
            transaction.rollback()
|
from django.db import transaction, connection


class TransactionMiddleware(object):
    def process_job(self, job):
        if not connection.in_atomic_block:
            transaction.set_autocommit(False)

    def process_result(self, job, result, duration):
        if not connection.in_atomic_block:
            transaction.commit()

    def process_exception(self, job, time_taken, *exc_info):
        if not connection.in_atomic_block:
            transaction.rollback()


# Legacy
if not hasattr(connection, 'in_atomic_block'):
    class TransactionMiddleware(object):
        def process_job(self, job):
            transaction.enter_transaction_management()
            transaction.managed(True)

        def process_result(self, job, result, duration):
            if not transaction.is_managed():
                return
            if transaction.is_dirty():
                transaction.commit()
            transaction.leave_transaction_management()

        def process_exception(self, job, time_taken, *exc_info):
            if transaction.is_dirty():
                transaction.rollback()
            transaction.leave_transaction_management()
|
Add a legacy version for older versions of Django.
|
Add a legacy version for older versions of Django.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,lamby/django-lightweight-queue,prophile/django-lightweight-queue,thread/django-lightweight-queue,prophile/django-lightweight-queue
|
---
+++
@@ -12,3 +12,22 @@
def process_exception(self, job, time_taken, *exc_info):
if not connection.in_atomic_block:
transaction.rollback()
+
+# Legacy
+if not hasattr(connection, 'in_atomic_block'):
+ class TransactionMiddleware(object):
+ def process_job(self, job):
+ transaction.enter_transaction_management()
+ transaction.managed(True)
+
+ def process_result(self, job, result, duration):
+ if not transaction.is_managed():
+ return
+ if transaction.is_dirty():
+ transaction.commit()
+ transaction.leave_transaction_management()
+
+ def process_exception(self, job, time_taken, *exc_info):
+ if transaction.is_dirty():
+ transaction.rollback()
+ transaction.leave_transaction_management()
|
b6c98dd016aa440f96565ceaee2716cd530beae5
|
pages/search_indexes.py
|
pages/search_indexes.py
|
"""Django haystack `SearchIndex` module."""
from pages.models import Page, Content
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
import datetime
class PageIndex(SearchIndex):
    """Search index for pages content."""
    text = CharField(document=True, use_template=True)
    title = CharField(model_attr='title')
    publication_date = DateTimeField(model_attr='publication_date')

    def get_queryset(self):
        """Used when the entire index for model is updated."""
        return Page.objects.published()


site.register(Page, PageIndex)
|
"""Django haystack `SearchIndex` module."""
from pages.models import Page, Content
from haystack.indexes import SearchIndex, CharField, DateTimeField
from haystack import site
import datetime
class PageIndex(SearchIndex):
    """Search index for pages content."""
    text = CharField(document=True, use_template=True)
    title = CharField(model_attr='title')
    url = CharField(model_attr='get_absolute_url')
    publication_date = DateTimeField(model_attr='publication_date')

    def get_queryset(self):
        """Used when the entire index for model is updated."""
        return Page.objects.published()


site.register(Page, PageIndex)
|
Add a url attribute to the SearchIndex for pages.
|
Add a url attribute to the SearchIndex for pages.
This is useful when displaying a list of search results because we
can create a link to the result without having to hit the database
for every object in the result list.
|
Python
|
bsd-3-clause
|
remik/django-page-cms,akaihola/django-page-cms,batiste/django-page-cms,akaihola/django-page-cms,batiste/django-page-cms,akaihola/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,pombredanne/django-page-cms-1,pombredanne/django-page-cms-1,oliciv/django-page-cms,remik/django-page-cms,remik/django-page-cms,oliciv/django-page-cms,oliciv/django-page-cms,batiste/django-page-cms
|
---
+++
@@ -10,6 +10,7 @@
"""Search index for pages content."""
text = CharField(document=True, use_template=True)
title = CharField(model_attr='title')
+ url = CharField(model_attr='get_absolute_url')
publication_date = DateTimeField(model_attr='publication_date')
def get_queryset(self):
|
7f86ab26fb1c6ba01f81fdc3f5b66a0f079c23ff
|
tests/test_app.py
|
tests/test_app.py
|
import asyncio
from unittest import mock

import aiohttp
import pytest

from bottery.app import App


def test_app_session():
    app = App()
    assert isinstance(app.session, aiohttp.ClientSession)


def test_app_already_configured_session():
    app = App()
    app._session = 'session'
    assert app.session == 'session'


def test_app_loop():
    app = App()
    assert isinstance(app.loop, asyncio.AbstractEventLoop)


def test_app_already_configured_loop():
    app = App()
    app._loop = 'loop'
    assert app.loop == 'loop'


@mock.patch('bottery.app.settings')
def test_app_configure_without_platforms(mocked_settings):
    """Should raise Exception if no platform was found at settings"""
    mocked_settings.PLATFORMS = {}

    app = App()
    with pytest.raises(Exception):
        app.configure_platforms()
|
import asyncio
import sys
from unittest import mock

import aiohttp
import pytest

from bottery.app import App


@pytest.fixture
def mocked_engine():
    mocked_engine_module = mock.MagicMock()
    mocked_engine_instance = mocked_engine_module.engine.return_value
    mocked_engine_instance.tasks.return_value = [(mock.MagicMock(), )]
    sys.modules['tests.fake_engine'] = mocked_engine_module

    yield {
        'module': mocked_engine_module,
        'instance': mocked_engine_instance
    }

    del sys.modules['tests.fake_engine']


def test_app_session():
    app = App()
    assert isinstance(app.session, aiohttp.ClientSession)


def test_app_already_configured_session():
    app = App()
    app._session = 'session'
    assert app.session == 'session'


def test_app_loop():
    app = App()
    assert isinstance(app.loop, asyncio.AbstractEventLoop)


def test_app_already_configured_loop():
    app = App()
    app._loop = 'loop'
    assert app.loop == 'loop'


@mock.patch('bottery.app.settings')
def test_app_configure_without_platforms(mocked_settings):
    """Should raise Exception if no platform was found at settings"""
    mocked_settings.PLATFORMS = {}

    app = App()
    with pytest.raises(Exception):
        app.configure_platforms()


@mock.patch('bottery.app.settings')
def test_app_configure_with_platforms(mocked_settings, mocked_engine):
    """Should call the platform interface methods"""
    mocked_settings.PLATFORMS = {
        'test': {
            'ENGINE': 'tests.fake_engine',
            'OPTIONS': {
                'token': 'should-be-a-valid-token'
            }
        }
    }

    app = App()
    app.configure_platforms()

    mocked_engine['module'].engine.assert_called_with(
        session=app.session,
        token='should-be-a-valid-token'
    )
    mocked_engine['instance'].configure.assert_called_with()
    mocked_engine['instance'].tasks.assert_called_with()
|
Increase the code coverage of App.configure_platforms method
|
Increase the code coverage of App.configure_platforms method
|
Python
|
mit
|
rougeth/bottery
|
---
+++
@@ -1,10 +1,26 @@
import asyncio
+import sys
from unittest import mock
import aiohttp
import pytest
from bottery.app import App
+
+
+@pytest.fixture
+def mocked_engine():
+ mocked_engine_module = mock.MagicMock()
+ mocked_engine_instance = mocked_engine_module.engine.return_value
+ mocked_engine_instance.tasks.return_value = [(mock.MagicMock(), )]
+ sys.modules['tests.fake_engine'] = mocked_engine_module
+
+ yield {
+ 'module': mocked_engine_module,
+ 'instance': mocked_engine_instance
+ }
+
+ del sys.modules['tests.fake_engine']
def test_app_session():
@@ -37,3 +53,26 @@
app = App()
with pytest.raises(Exception):
app.configure_platforms()
+
+@mock.patch('bottery.app.settings')
+def test_app_configure_with_platforms(mocked_settings, mocked_engine):
+ """Should call the platform interface methods"""
+
+ mocked_settings.PLATFORMS = {
+ 'test': {
+ 'ENGINE': 'tests.fake_engine',
+ 'OPTIONS': {
+ 'token': 'should-be-a-valid-token'
+ }
+ }
+ }
+
+ app = App()
+ app.configure_platforms()
+
+ mocked_engine['module'].engine.assert_called_with(
+ session=app.session,
+ token='should-be-a-valid-token'
+ )
+ mocked_engine['instance'].configure.assert_called_with()
+ mocked_engine['instance'].tasks.assert_called_with()
|
2e9c6c883de12b7293b9e932e5268a2d806e714c
|
chatterbot/logic/time_adapter.py
|
chatterbot/logic/time_adapter.py
|
from __future__ import unicode_literals
from datetime import datetime
from .logic_adapter import LogicAdapter


class TimeLogicAdapter(LogicAdapter):
    """
    The TimeLogicAdapter returns the current time.
    """

    def __init__(self, **kwargs):
        super(TimeLogicAdapter, self).__init__(**kwargs)
        from textblob.classifiers import NaiveBayesClassifier

        training_data = [
            ('what time is it', 1),
            ('do you know the time', 1),
            ('do you know what time it is', 1),
            ('what is the time', 1),
            ('it is time to go to sleep', 0),
            ('what is your favorite color', 0),
            ('i had a great time', 0),
            ('what is', 0)
        ]

        self.classifier = NaiveBayesClassifier(training_data)

    def process(self, statement):
        from chatterbot.conversation import Statement
        now = datetime.now()

        confidence = self.classifier.classify(statement.text.lower())
        response = Statement('The current time is ' + now.strftime('%I:%M %p'))

        return confidence, response
|
from __future__ import unicode_literals
from datetime import datetime
from .logic_adapter import LogicAdapter


class TimeLogicAdapter(LogicAdapter):
    """
    The TimeLogicAdapter returns the current time.
    """

    def __init__(self, **kwargs):
        super(TimeLogicAdapter, self).__init__(**kwargs)
        from nltk import NaiveBayesClassifier

        self.positive = [
            'what time is it',
            'do you know the time',
            'do you know what time it is',
            'what is the time'
        ]

        self.negative = [
            'it is time to go to sleep',
            'what is your favorite color',
            'i had a great time',
            'what is'
        ]

        labeled_data = (
            [(name, 0) for name in self.negative] +
            [(name, 1) for name in self.positive]
        )

        # train_set = apply_features(self.time_question_features, training_data)
        train_set = [(self.time_question_features(n), text) for (n, text) in labeled_data]

        self.classifier = NaiveBayesClassifier.train(train_set)

    def time_question_features(self, text):
        """
        Provide an analysis of significan features in the string.
        """
        features = {}

        all_words = " ".join(self.positive + self.negative).split()

        for word in text.split():
            features['contains({})'.format(word)] = (word in all_words)

        for letter in 'abcdefghijklmnopqrstuvwxyz':
            features['count({})'.format(letter)] = text.lower().count(letter)
            features['has({})'.format(letter)] = (letter in text.lower())

        return features

    def process(self, statement):
        from chatterbot.conversation import Statement
        now = datetime.now()

        time_features = self.time_question_features(statement.text.lower())
        confidence = self.classifier.classify(time_features)
        response = Statement('The current time is ' + now.strftime('%I:%M %p'))

        return confidence, response
|
Remove textblob dependency in time logic adapter
|
Remove textblob dependency in time logic adapter
|
Python
|
bsd-3-clause
|
gunthercox/ChatterBot,vkosuri/ChatterBot,Gustavo6046/ChatterBot,davizucon/ChatterBot,Reinaesaya/OUIRL-ChatBot,Reinaesaya/OUIRL-ChatBot,maclogan/VirtualPenPal
|
---
+++
@@ -10,27 +10,56 @@
def __init__(self, **kwargs):
super(TimeLogicAdapter, self).__init__(**kwargs)
- from textblob.classifiers import NaiveBayesClassifier
+ from nltk import NaiveBayesClassifier
- training_data = [
- ('what time is it', 1),
- ('do you know the time', 1),
- ('do you know what time it is', 1),
- ('what is the time', 1),
- ('it is time to go to sleep', 0),
- ('what is your favorite color', 0),
- ('i had a great time', 0),
- ('what is', 0)
+ self.positive = [
+ 'what time is it',
+ 'do you know the time',
+ 'do you know what time it is',
+ 'what is the time'
]
- self.classifier = NaiveBayesClassifier(training_data)
+ self.negative = [
+ 'it is time to go to sleep',
+ 'what is your favorite color',
+ 'i had a great time',
+ 'what is'
+ ]
+
+ labeled_data = (
+ [(name, 0) for name in self.negative] +
+ [(name, 1) for name in self.positive]
+ )
+
+ # train_set = apply_features(self.time_question_features, training_data)
+ train_set = [(self.time_question_features(n), text) for (n, text) in labeled_data]
+
+ self.classifier = NaiveBayesClassifier.train(train_set)
+
+ def time_question_features(self, text):
+ """
+ Provide an analysis of significan features in the string.
+ """
+ features = {}
+
+ all_words = " ".join(self.positive + self.negative).split()
+
+ for word in text.split():
+ features['contains({})'.format(word)] = (word in all_words)
+
+ for letter in 'abcdefghijklmnopqrstuvwxyz':
+ features['count({})'.format(letter)] = text.lower().count(letter)
+ features['has({})'.format(letter)] = (letter in text.lower())
+
+ return features
def process(self, statement):
from chatterbot.conversation import Statement
now = datetime.now()
- confidence = self.classifier.classify(statement.text.lower())
+ time_features = self.time_question_features(statement.text.lower())
+ confidence = self.classifier.classify(time_features)
response = Statement('The current time is ' + now.strftime('%I:%M %p'))
return confidence, response
|
025c95a59b079d630c778646d5c82f5e0679b47c
|
sale_automatic_workflow/models/account_invoice.py
|
sale_automatic_workflow/models/account_invoice.py
|
# -*- coding: utf-8 -*-
# © 2011 Akretion Sébastien BEAU <sebastien.beau@akretion.com>
# © 2013 Camptocamp SA (author: Guewen Baconnier)
# © 2016 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from odoo import models, fields


class AccountInvoice(models.Model):
    _inherit = "account.invoice"

    workflow_process_id = fields.Many2one(
        comodel_name='sale.workflow.process',
        string='Sale Workflow Process'
    )
|
# -*- coding: utf-8 -*-
# © 2011 Akretion Sébastien BEAU <sebastien.beau@akretion.com>
# © 2013 Camptocamp SA (author: Guewen Baconnier)
# © 2016 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields


class AccountInvoice(models.Model):
    _inherit = "account.invoice"

    workflow_process_id = fields.Many2one(
        comodel_name='sale.workflow.process',
        string='Sale Workflow Process',
        copy=False,
    )
|
Fix issue on account.invoice about workflow_process_id: if a user duplicate an invoice, it copy also the workflow and validations (the reason of bugs)
|
[FIX] Fix issue on account.invoice about workflow_process_id: if a user duplicate an invoice, it copy also the workflow and validations (the reason of bugs)
|
Python
|
agpl-3.0
|
kittiu/sale-workflow,kittiu/sale-workflow
|
---
+++
@@ -3,7 +3,6 @@
# © 2013 Camptocamp SA (author: Guewen Baconnier)
# © 2016 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
-
from odoo import models, fields
@@ -12,5 +11,6 @@
workflow_process_id = fields.Many2one(
comodel_name='sale.workflow.process',
- string='Sale Workflow Process'
+ string='Sale Workflow Process',
+ copy=False,
)
|
7e2440c00ce75dc3ff0eac53e63d629981a9873a
|
raven/contrib/celery/__init__.py
|
raven/contrib/celery/__init__.py
|
"""
raven.contrib.celery
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
    from celery.task import task
except ImportError:
    from celery.decorators import task
from celery.signals import task_failure
from raven.base import Client


class CeleryMixin(object):
    def send_encoded(self, message):
        "Errors through celery"
        self.send_raw.delay(message)

    @task(routing_key='sentry')
    def send_raw(self, message):
        return super(CeleryMixin, self).send_encoded(message)


class CeleryClient(CeleryMixin, Client):
    pass


def register_signal(client):
    def process_failure_signal(exception, traceback, sender, task_id,
                               signal, args, kwargs, einfo, **kw):
        exc_info = (type(exception), exception, traceback)
        client.captureException(
            exc_info=exc_info,
            extra={
                'task_id': task_id,
                'sender': sender,
                'args': args,
                'kwargs': kwargs,
            })
    task_failure.connect(process_failure_signal)
|
"""
raven.contrib.celery
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
    from celery.task import task
except ImportError:
    from celery.decorators import task
from celery.signals import task_failure
from raven.base import Client


class CeleryMixin(object):
    def send_encoded(self, message):
        "Errors through celery"
        self.send_raw.delay(message)

    @task(routing_key='sentry')
    def send_raw(self, message):
        return super(CeleryMixin, self).send_encoded(message)


class CeleryClient(CeleryMixin, Client):
    pass


def register_signal(client):
    @task_failure.connect(weak=False)
    def process_failure_signal(sender, task_id, exception, args, kwargs,
                               traceback, einfo, **kw):
        client.captureException(
            exc_info=einfo.exc_info,
            extra={
                'task_id': task_id,
                'task': sender,
                'args': args,
                'kwargs': kwargs,
            })
|
Fix celery task_failure signal definition
|
Fix celery task_failure signal definition
|
Python
|
bsd-3-clause
|
lepture/raven-python,recht/raven-python,lepture/raven-python,beniwohli/apm-agent-python,dbravender/raven-python,patrys/opbeat_python,recht/raven-python,jbarbuto/raven-python,getsentry/raven-python,akalipetis/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,patrys/opbeat_python,ewdurbin/raven-python,nikolas/raven-python,daikeren/opbeat_python,arthurlogilab/raven-python,icereval/raven-python,ronaldevers/raven-python,jmp0xf/raven-python,danriti/raven-python,danriti/raven-python,smarkets/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,akheron/raven-python,dbravender/raven-python,smarkets/raven-python,ticosax/opbeat_python,lepture/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,lopter/raven-python-old,icereval/raven-python,ronaldevers/raven-python,johansteffner/raven-python,jmagnusson/raven-python,tarkatronic/opbeat_python,collective/mr.poe,someonehan/raven-python,jbarbuto/raven-python,inspirehep/raven-python,akalipetis/raven-python,dirtycoder/opbeat_python,smarkets/raven-python,arthurlogilab/raven-python,daikeren/opbeat_python,Photonomie/raven-python,inspirehep/raven-python,beniwohli/apm-agent-python,jbarbuto/raven-python,daikeren/opbeat_python,inspirehep/raven-python,danriti/raven-python,someonehan/raven-python,nikolas/raven-python,patrys/opbeat_python,ewdurbin/raven-python,icereval/raven-python,akheron/raven-python,jbarbuto/raven-python,inspirehep/raven-python,beniwohli/apm-agent-python,someonehan/raven-python,hzy/raven-python,percipient/raven-python,openlabs/raven,ticosax/opbeat_python,getsentry/raven-python,tarkatronic/opbeat_python,dirtycoder/opbeat_python,recht/raven-python,jmp0xf/raven-python,jmagnusson/raven-python,arthurlogilab/raven-python,percipient/raven-python,nikolas/raven-python,getsentry/raven-python,ronaldevers/raven-python,johansteffner/raven-python,akheron/raven-python,hzy/raven-python,ewdurbin/raven-python,dirtycoder/opbeat_python,percipient/raven-python,jmagnusson/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,nikolas/raven-python,johansteffner/raven-python,icereval/raven-python,hzy/raven-python,smarkets/raven-python,beniwohli/apm-agent-python,Photonomie/raven-python,Photonomie/raven-python,jmp0xf/raven-python,ticosax/opbeat_python,tarkatronic/opbeat_python,alex/raven,arthurlogilab/raven-python,akalipetis/raven-python,dbravender/raven-python,patrys/opbeat_python
|
---
+++
@@ -29,15 +29,15 @@
def register_signal(client):
- def process_failure_signal(exception, traceback, sender, task_id,
- signal, args, kwargs, einfo, **kw):
- exc_info = (type(exception), exception, traceback)
+ @task_failure.connect(weak=False)
+ def process_failure_signal(sender, task_id, exception, args, kwargs,
+ traceback, einfo, **kw):
client.captureException(
- exc_info=exc_info,
+ exc_info=einfo.exc_info,
extra={
'task_id': task_id,
- 'sender': sender,
+ 'task': sender,
'args': args,
'kwargs': kwargs,
})
- task_failure.connect(process_failure_signal)
+
|
9d68808a363ad00c3fc0b0907d625e5c75bdb8ae
|
ptt_preproc_sampling.py
|
ptt_preproc_sampling.py
|
#!/usr/bin/env python

from pathlib import Path
from random import shuffle
from shutil import copy

# configs
N = 10000
SAMPLED_DIR_PATH = Path('sampled/')

# mkdir if doesn't exist
if not SAMPLED_DIR_PATH.exists():
    SAMPLED_DIR_PATH.mkdir()

# sample and copy
paths = [p for p in Path('preprocessed/').iterdir()]
shuffle(paths)
for p in paths[:N]:
    copy(str(p), str(SAMPLED_DIR_PATH / p.name))
|
#!/usr/bin/env python

from pathlib import Path
from random import sample
from os import remove

# configs
N = 10000

# remove unsampled
paths = [path for path in Path('preprocessed/').iterdir()]
paths_len = len(paths)

if paths_len <= N:
    raise RuntimeError('file count {:,} <= N {:,}'.format(paths_len, N))

for path in sample(paths, paths_len-N):
    remove(str(path))
|
Use removing rather than copying
|
Use removing rather than copying
|
Python
|
mit
|
moskytw/mining-news
|
---
+++
@@ -2,24 +2,21 @@
from pathlib import Path
-from random import shuffle
-from shutil import copy
+from random import sample
+from os import remove
# configs
N = 10000
-SAMPLED_DIR_PATH = Path('sampled/')
-# mkdir if doesn't exist
+# remove unsampled
-if not SAMPLED_DIR_PATH.exists():
- SAMPLED_DIR_PATH.mkdir()
+paths = [path for path in Path('preprocessed/').iterdir()]
+paths_len = len(paths)
-# sample and copy
+if paths_len <= N:
+ raise RuntimeError('file count {:,} <= N {:,}'.format(paths_len, N))
-paths = [p for p in Path('preprocessed/').iterdir()]
-shuffle(paths)
-
-for p in paths[:N]:
- copy(str(p), str(SAMPLED_DIR_PATH / p.name))
+for path in sample(paths, paths_len-N):
+ remove(str(path))
|
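As an aside, a small sketch (not from the repository; the paths are made up) of the `random.sample` idiom used above, which picks k distinct items without shuffling the whole list in place:

```python
# Illustration only: choose which files to delete so that exactly N remain.
from random import sample

paths = ['a.txt', 'b.txt', 'c.txt', 'd.txt', 'e.txt']
N = 2
to_remove = sample(paths, len(paths) - N)   # 3 distinct paths chosen at random
print(sorted(set(paths) - set(to_remove)))  # the N surviving paths
```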
b5e4af74bfc12eb3ae9ca14ab4cebc49daf05fdc
|
api/wb/urls.py
|
api/wb/urls.py
|
from django.conf.urls import url

from api.wb import views

app_name = 'osf'

urlpatterns = [
    url(r'^move/', views.MoveFile.as_view(), name=views.MoveFile.view_name),
    url(r'^copy/', views.MoveFile.as_view(), name=views.MoveFile.view_name),
]
|
from django.conf.urls import url

from api.wb import views

app_name = 'osf'

urlpatterns = [
    url(r'^(?P<node_id>\w+)/move/', views.MoveFile.as_view(), name=views.MoveFile.view_name),
    url(r'^(?P<node_id>\w+)/copy/', views.MoveFile.as_view(), name=views.MoveFile.view_name),
]
|
Add node id to url.
|
Add node id to url.
|
Python
|
apache-2.0
|
baylee-d/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,caseyrollins/osf.io,erinspace/osf.io,pattisdr/osf.io,erinspace/osf.io,icereval/osf.io,adlius/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,adlius/osf.io,felliott/osf.io,Johnetordoff/osf.io,felliott/osf.io,felliott/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,adlius/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,aaxelb/osf.io,aaxelb/osf.io,saradbowman/osf.io,icereval/osf.io,saradbowman/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,caseyrollins/osf.io,aaxelb/osf.io,mfraezz/osf.io,sloria/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,mfraezz/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,icereval/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,pattisdr/osf.io,sloria/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,felliott/osf.io,mattclark/osf.io,Johnetordoff/osf.io,cslzchen/osf.io
|
---
+++
@@ -4,6 +4,6 @@
app_name = 'osf'
urlpatterns = [
- url(r'^move/', views.MoveFile.as_view(), name=views.MoveFile.view_name),
- url(r'^copy/', views.MoveFile.as_view(), name=views.MoveFile.view_name),
+ url(r'^(?P<node_id>\w+)/move/', views.MoveFile.as_view(), name=views.MoveFile.view_name),
+ url(r'^(?P<node_id>\w+)/copy/', views.MoveFile.as_view(), name=views.MoveFile.view_name),
]
|
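A hedged sketch of how a named group such as (?P<node_id>\w+) reaches the view as a keyword argument, assuming an older Django where django.conf.urls.url is still available (as in the project above); the view function and URL name are illustrative.
from django.conf.urls import url
from django.http import HttpResponse

def move_file(request, node_id):
    # node_id is captured from the URL by the named group below
    return HttpResponse('moving files for node %s' % node_id)

urlpatterns = [
    url(r'^(?P<node_id>\w+)/move/', move_file, name='move-file'),
]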
44893be528063d25d0b2305c9d24be4605c49f3c
|
mcserver/config/core.py
|
mcserver/config/core.py
|
"""
MCServer Tools config loader
"""
import json
import os.path
class CoreConfig(object):
"""
MCServer Tools configuration
"""
SETTINGS_FILE = 'mcserver.settings'
def __init__(self, path):
"""
Load configuration from the given file path
"""
self.settings_file = os.path.join(path, self.SETTINGS_FILE)
self._settings = {}
self._load_settings()
def _load_settings(self):
"""
Load the settings from disk
"""
with open(self.settings_file, 'r') as fh:
self._settings = json.load(fh)
def get(self, property, default = None):
"""
Try to get the property value. If the property was not found
then return the given default.
"""
if property not in self._settings:
return default
return self._settings[property]
def has(self, property):
"""
Check if the config has the given property.
"""
return property in self._settings
|
"""
MCServer Tools config loader
"""
import json
import os.path
from mcserver import MCServerError
class CoreConfig(object):
"""
MCServer Tools configuration
"""
SETTINGS_FILE = 'mcserver.settings'
def __init__(self, path):
"""
Load configuration from the given file path
"""
self.settings_file = os.path.join(path, self.SETTINGS_FILE)
self._settings = {}
self._load_settings()
def _load_settings(self):
"""
Load the settings from disk
"""
try:
with open(self.settings_file, 'r') as fh:
self._settings = json.load(fh)
except:
raise MCServerError('Could not open settings file: {}'.format(self.settings_file))
def get(self, property, default = None):
"""
Try to get the property value. If the property was not found
then return the given default.
"""
if property not in self._settings:
return default
return self._settings[property]
def has(self, property):
"""
Check if the config has the given property.
"""
return property in self._settings
|
Check for the existence of the settings file and report if it's not there
|
Check for the existence of the settings file and report if it's not there
|
Python
|
mit
|
cadyyan/mcserver-tools,cadyyan/mcserver-tools
|
---
+++
@@ -4,6 +4,8 @@
import json
import os.path
+
+from mcserver import MCServerError
class CoreConfig(object):
"""
@@ -27,8 +29,11 @@
Load the settings from disk
"""
- with open(self.settings_file, 'r') as fh:
- self._settings = json.load(fh)
+ try:
+ with open(self.settings_file, 'r') as fh:
+ self._settings = json.load(fh)
+ except:
+ raise MCServerError('Could not open settings file: {}'.format(self.settings_file))
def get(self, property, default = None):
"""
|
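A standalone sketch of the same guard pattern, with the bare except narrowed to the errors that open() and json.load() actually raise; MCServerError here is defined locally as a stand-in for the project's exception class.
import json

class MCServerError(Exception):
    pass

def load_settings(path):
    try:
        with open(path, 'r') as fh:
            return json.load(fh)
    except (IOError, OSError, ValueError):
        # missing/unreadable file, or malformed JSON
        raise MCServerError('Could not open settings file: {}'.format(path))

# load_settings('mcserver.settings')  # raises MCServerError if absent or malformed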
20224e4fe8b93dee087dd7a455f9709b9795a026
|
app/models.py
|
app/models.py
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
from app import database
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
liked_by = database.relationship('Liker_Talk', backref='talk', lazy='dynamic')
def __repr__(self):
return '<Talk %r>' % self.id
class Speaker(database.Model):
facebook_id = database.Column(database.BIGINT, primary_key=True)
name = database.Column(database.String(128), nullable=False)
talks = database.relationship('Talk', backref='speaker', lazy='dynamic')
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
class Liker_Talk(database.Model):
liker_facebook_id = database.Column(database.BIGINT, primary_key=True)
talk_id = database.Column(database.Integer, database.ForeignKey('talk.id'), primary_key=True)
def __repr__(self):
liker = repr(self.liker_facebook_id)
talk = repr(self.talk_id)
return '<Liker_Talk %r>' % ', '.join((liker, talk))
|
Make Talk title a unique property
|
Make Talk title a unique property
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
---
+++
@@ -3,7 +3,7 @@
class Talk(database.Model):
id = database.Column(database.Integer, primary_key=True, autoincrement=True)
- title = database.Column(database.String(128), nullable=False)
+ title = database.Column(database.String(128), unique=True, nullable=False)
description = database.Column(database.String(512))
speaker_facebook_id = database.Column(database.BIGINT, database.ForeignKey('speaker.facebook_id'),
nullable=False)
|
3611e8a1b6477d251ddb2c90211e0cfee370671d
|
cal_pipe/easy_RFI_flagging.py
|
cal_pipe/easy_RFI_flagging.py
|
import sys
import os
'''
Easier searching for good RFI flagging values
'''
try:
vis = sys.argv[1]
except IndexError:
vis = raw_input("Input vis? : ")
# Just want the number of SPWs
tb.open(os.path.join(vis, "SPECTRAL_WINDOW"))
nchans = tb.getcol('NUM_CHAN')
tb.close()
spws = range(len(nchans))
default('flagdata')
for spw in spws:
print("On spw "+str(spw)+" of "+str(len(nchans)))
freqdevscale = 4.0
timedevscale = 4.0
while True:
print("Starting at ")
flagdata(vis=vis, mode='rflag', field='3C48*',
spw=str(spw), datacolumn='corrected',
action='calculate', display='both',
freqdevscale=freqdevscale, timedevscale=timedevscale,
flagbackup=False)
adjust = True if raw_input("New thresholds? : ") == "T" else False
if adjust:
print("Current freqdevscale and timedevscale: %s %s" % (freqdevscale, timedevscale))
freqdevscale = float(raw_input("New freqdevscale : "))
timedevscale = float(raw_input("New timedevscale : "))
else:
break
|
import sys
import os
'''
Easier searching for good RFI flagging values
'''
try:
ms_name = sys.argv[1]
except IndexError:
ms_name = raw_input("Input vis? : ")
# Just want the number of SPWs
tb.open(os.path.join(ms_name, "SPECTRAL_WINDOW"))
nchans = tb.getcol('NUM_CHAN')
tb.close()
spws = range(len(nchans))
default('flagdata')
for spw in spws:
print("On spw "+str(spw)+" of "+str(len(nchans)))
freqdevscale = 4.0
timedevscale = 4.0
while True:
print("Starting at ")
flagdata(vis=ms_name, mode='rflag', field='3C48*',
spw=str(spw), datacolumn='corrected',
action='calculate', display='both',
freqdevscale=freqdevscale, timedevscale=timedevscale,
flagbackup=False)
adjust = True if raw_input("New thresholds? : ") == "T" else False
if adjust:
print("Current freqdevscale and timedevscale: %s %s" % (freqdevscale, timedevscale))
freqdevscale = float(raw_input("New freqdevscale : "))
timedevscale = float(raw_input("New timedevscale : "))
else:
break
|
Change name so it isn't reset
|
Change name so it isn't reset
|
Python
|
mit
|
e-koch/canfar_scripts,e-koch/canfar_scripts
|
---
+++
@@ -7,12 +7,12 @@
'''
try:
- vis = sys.argv[1]
+ ms_name = sys.argv[1]
except IndexError:
- vis = raw_input("Input vis? : ")
+ ms_name = raw_input("Input vis? : ")
# Just want the number of SPWs
-tb.open(os.path.join(vis, "SPECTRAL_WINDOW"))
+tb.open(os.path.join(ms_name, "SPECTRAL_WINDOW"))
nchans = tb.getcol('NUM_CHAN')
tb.close()
@@ -27,7 +27,7 @@
while True:
print("Starting at ")
- flagdata(vis=vis, mode='rflag', field='3C48*',
+ flagdata(vis=ms_name, mode='rflag', field='3C48*',
spw=str(spw), datacolumn='corrected',
action='calculate', display='both',
freqdevscale=freqdevscale, timedevscale=timedevscale,
|
be458ff809f6f49e21be06054ad096ff3f5430f9
|
masters/master.client.syzygy/master_site_config.py
|
masters/master.client.syzygy/master_site_config.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
class Syzygy(object):
project_name = 'Syzygy'
master_port = 8142
slave_port = 8242
master_port_alt = 8342
tree_closing_notification_recipients = []
from_address = 'buildbot@chromium.org'
master_host = 'master3.golo.chromium.org'
buildslave_version = 'buildbot_slave_8_4'
twisted_version = 'twisted_10_2'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
class Syzygy(object):
project_name = 'Syzygy'
master_port = 8042
slave_port = 8142
master_port_alt = 8242
tree_closing_notification_recipients = []
from_address = 'buildbot@chromium.org'
master_host = 'master3.golo.chromium.org'
buildslave_version = 'buildbot_slave_8_4'
twisted_version = 'twisted_10_2'
|
Fix ports for syzygy master to match previous ports.
|
Fix ports for syzygy master to match previous ports.
TBR=chrisha@chromium.org
BUG=
Review URL: https://chromiumcodereview.appspot.com/12315047
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@183944 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
---
+++
@@ -6,9 +6,9 @@
class Syzygy(object):
project_name = 'Syzygy'
- master_port = 8142
- slave_port = 8242
- master_port_alt = 8342
+ master_port = 8042
+ slave_port = 8142
+ master_port_alt = 8242
tree_closing_notification_recipients = []
from_address = 'buildbot@chromium.org'
master_host = 'master3.golo.chromium.org'
|
80acc483f9b5d7fb462d81a2df092d16f5dbf035
|
openprocurement/tender/limited/subscribers.py
|
openprocurement/tender/limited/subscribers.py
|
from pyramid.events import subscriber
from openprocurement.tender.core.events import TenderInitializeEvent
from openprocurement.tender.core.utils import get_now, calculate_business_date
@subscriber(TenderInitializeEvent, procurementMethodType="reporting")
def tender_init_handler(event):
""" initialization handler for tenders """
event.tender.date = get_now()
@subscriber(TenderInitializeEvent, procurementMethodType="negotiation")
def tender_init_handler(event):
""" initialization handler for tenders """
tender = event.tender
tender.date = get_now()
if tender.lots:
for lot in tender.lots:
lot.date = get_now()
@subscriber(TenderInitializeEvent, procurementMethodType="negotiation.quick")
def tender_init_handler(event):
""" initialization handler for tenders """
tender = event.tender
tender.date = get_now()
if tender.lots:
for lot in tender.lots:
lot.date = get_now()
|
from pyramid.events import subscriber
from openprocurement.tender.core.events import TenderInitializeEvent
from openprocurement.tender.core.utils import get_now, calculate_business_date
@subscriber(TenderInitializeEvent, procurementMethodType="reporting")
def tender_init_handler_1(event):
""" initialization handler for tenders """
event.tender.date = get_now()
@subscriber(TenderInitializeEvent, procurementMethodType="negotiation")
def tender_init_handler_2(event):
""" initialization handler for tenders """
tender = event.tender
tender.date = get_now()
if tender.lots:
for lot in tender.lots:
lot.date = get_now()
@subscriber(TenderInitializeEvent, procurementMethodType="negotiation.quick")
def tender_init_handler_3(event):
""" initialization handler for tenders """
tender = event.tender
tender.date = get_now()
if tender.lots:
for lot in tender.lots:
lot.date = get_now()
|
Change tender init handlers names
|
Change tender init handlers names
|
Python
|
apache-2.0
|
openprocurement/openprocurement.tender.limited
|
---
+++
@@ -4,13 +4,13 @@
@subscriber(TenderInitializeEvent, procurementMethodType="reporting")
-def tender_init_handler(event):
+def tender_init_handler_1(event):
""" initialization handler for tenders """
event.tender.date = get_now()
@subscriber(TenderInitializeEvent, procurementMethodType="negotiation")
-def tender_init_handler(event):
+def tender_init_handler_2(event):
""" initialization handler for tenders """
tender = event.tender
tender.date = get_now()
@@ -20,7 +20,7 @@
@subscriber(TenderInitializeEvent, procurementMethodType="negotiation.quick")
-def tender_init_handler(event):
+def tender_init_handler_3(event):
""" initialization handler for tenders """
tender = event.tender
tender.date = get_now()
|
689dd5cb67516fd091a69e39708b547c66f96750
|
nap/dataviews/models.py
|
nap/dataviews/models.py
|
from .fields import Field
from .views import DataView
from django.utils.six import with_metaclass
class MetaView(type):
def __new__(mcs, name, bases, attrs):
meta = attrs.get('Meta', None)
try:
model = meta.model
except AttributeError:
if name != 'ModelDataView':
raise
else:
include = getattr(meta, 'fields', None)
exclude = getattr(meta, 'exclude', [])
# XXX Does the top base have all fields?
for model_field in model._meta.fields:
if model_field.name in attrs:
continue
if model_field.name in exclude:
continue
if include != '__all__' and model_field.name not in include:
continue
# XXX Magic for field types
attrs[model_field.name] = Field(model_field.name)
attrs['_meta'] = meta
return super(MetaView, mcs).__new__(mcs, name, bases, attrs)
class ModelDataView(with_metaclass(MetaView, DataView)):
pass
|
from django.db.models.fields import NOT_PROVIDED
from django.utils.six import with_metaclass
from . import filters
from .fields import Field
from .views import DataView
# Map of ModelField name -> list of filters
FIELD_FILTERS = {
'DateField': [filters.DateFilter],
'TimeField': [filters.TimeFilter],
'DateTimeField': [filters.DateTimeFilter],
}
class Options(object):
def __init__(self, meta):
self.model = getattr(meta, 'model', None)
self.fields = getattr(meta, 'fields', [])
self.exclude = getattr(meta, 'exclude', [])
self.required = getattr(meta, 'required', {})
class MetaView(type):
def __new__(mcs, name, bases, attrs):
meta = Options(attrs.get('Meta', None))
if meta.model is None:
if name != 'ModelDataView':
raise ValueError('model not defined on class Meta')
else:
# XXX Does the top base have all fields?
for model_field in meta.model._meta.fields:
if model_field.name in attrs:
continue
if model_field.name in meta.exclude:
continue
if meta.fields != '__all__' and model_field.name not in meta.fields:
continue
# XXX Magic for field types
kwargs = {}
kwargs['default'] = model_field.default
kwargs['required'] = any([
not model_field.blank,
model_field.default is not NOT_PROVIDED,
])
kwargs['filters'] = FIELD_FILTERS.get(model_field.__class__.__name__, [])
attrs[model_field.name] = Field(model_field.name, **kwargs)
attrs['_meta'] = meta
return super(MetaView, mcs).__new__(mcs, name, bases, attrs)
class ModelDataView(with_metaclass(MetaView, DataView)):
pass
|
Add Options class; add field filters lists; start proper model field introspection
|
Add Options class
Add field filters lists
Start proper model field introspection
|
Python
|
bsd-3-clause
|
limbera/django-nap,MarkusH/django-nap
|
---
+++
@@ -1,36 +1,56 @@
+from django.db.models.fields import NOT_PROVIDED
+from django.utils.six import with_metaclass
+
+from . import filters
from .fields import Field
from .views import DataView
-from django.utils.six import with_metaclass
+
+# Map of ModelField name -> list of filters
+FIELD_FILTERS = {
+ 'DateField': [filters.DateFilter],
+ 'TimeField': [filters.TimeFilter],
+ 'DateTimeField': [filters.DateTimeFilter],
+}
+
+
+class Options(object):
+ def __init__(self, meta):
+ self.model = getattr(meta, 'model', None)
+ self.fields = getattr(meta, 'fields', [])
+ self.exclude = getattr(meta, 'exclude', [])
+ self.required = getattr(meta, 'required', {})
class MetaView(type):
def __new__(mcs, name, bases, attrs):
- meta = attrs.get('Meta', None)
+ meta = Options(attrs.get('Meta', None))
- try:
- model = meta.model
- except AttributeError:
+ if meta.model is None:
if name != 'ModelDataView':
- raise
+ raise ValueError('model not defined on class Meta')
else:
- include = getattr(meta, 'fields', None)
- exclude = getattr(meta, 'exclude', [])
-
# XXX Does the top base have all fields?
- for model_field in model._meta.fields:
+ for model_field in meta.model._meta.fields:
if model_field.name in attrs:
continue
- if model_field.name in exclude:
+ if model_field.name in meta.exclude:
continue
- if include != '__all__' and model_field.name not in include:
+ if meta.fields != '__all__' and model_field.name not in meta.fields:
continue
# XXX Magic for field types
- attrs[model_field.name] = Field(model_field.name)
+ kwargs = {}
+ kwargs['default'] = model_field.default
+ kwargs['required'] = any([
+ not model_field.blank,
+ model_field.default is not NOT_PROVIDED,
+ ])
+ kwargs['filters'] = FIELD_FILTERS.get(model_field.__class__.__name__, [])
+ attrs[model_field.name] = Field(model_field.name, **kwargs)
attrs['_meta'] = meta
|
10e23fdd5c0427ad1ff5a5284410c755378a0e6d
|
SoftLayer/CLI/object_storage/list_accounts.py
|
SoftLayer/CLI/object_storage/list_accounts.py
|
"""List Object Storage accounts."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
@click.command()
@environment.pass_env
def cli(env):
"""List object storage accounts."""
mgr = SoftLayer.ObjectStorageManager(env.client)
accounts = mgr.list_accounts()
table = formatting.Table(['id', 'name', 'apiType'])
table.sortby = 'id'
global api_type
for account in accounts:
if 'vendorName' in account and 'Swift' == account['vendorName']:
api_type = 'Swift'
elif 'Cleversafe' in account['serviceResource']['name']:
api_type = 'S3'
table.add_row([
account['id'],
account['username'],
api_type,
])
env.fout(table)
|
"""List Object Storage accounts."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
@click.command()
@environment.pass_env
def cli(env):
"""List object storage accounts."""
mgr = SoftLayer.ObjectStorageManager(env.client)
accounts = mgr.list_accounts()
table = formatting.Table(['id', 'name', 'apiType'])
table.sortby = 'id'
api_type = None
for account in accounts:
if 'vendorName' in account and account['vendorName'] == 'Swift':
api_type = 'Swift'
elif 'Cleversafe' in account['serviceResource']['name']:
api_type = 'S3'
table.add_row([
account['id'],
account['username'],
api_type,
])
env.fout(table)
|
Fix object storage apiType for S3 and Swift.
|
Fix object storage apiType for S3 and Swift.
|
Python
|
mit
|
allmightyspiff/softlayer-python,softlayer/softlayer-python,kyubifire/softlayer-python
|
---
+++
@@ -17,9 +17,9 @@
accounts = mgr.list_accounts()
table = formatting.Table(['id', 'name', 'apiType'])
table.sortby = 'id'
- global api_type
+ api_type = None
for account in accounts:
- if 'vendorName' in account and 'Swift' == account['vendorName']:
+ if 'vendorName' in account and account['vendorName'] == 'Swift':
api_type = 'Swift'
elif 'Cleversafe' in account['serviceResource']['name']:
api_type = 'S3'
|
e9386e24bea91b8659b5184fe146002f555ccd15
|
versions/xmlib.py
|
versions/xmlib.py
|
#!/usr/bin/env python
#
# Xm
#
# The xm library is hard to find and requires this special code.
#
# Author P G Jones - 11/07/2012 <p.g.jones@qmul.ac.uk> : First revision
# Author P G Jones - 22/09/2012 <p.g.jones@qmul.ac.uk> : Major refactor of snoing.
####################################################################################################
import systempackage
import system
import os
class Xm(systempackage.SystemPackage):
""" Package for the Open Motif/Xm library."""
def __init__(self, system):
super(Xm, self).__init__("Xm", system, "Install Xm-dev (OpenMotif) on this system.")
def check_state(self):
""" Check the Xm state, slightly more involved on macs."""
if self._system.get_os_type() == system.System.Mac:
if os.path.exists("/sw/include/Xm"):
flags = [ "-I%s" % "/sw/include/Xm", "-L%s" % "/sw/lib" ]
elif os.path.exists("/usr/OpenMotif"):
flags = [ "-I%s" % "/usr/OpenMotif/include", "-L%s" % "/usr/OpenMotif/lib" ]
self._installed = self._system._test_compile(["Xm.h"], flags)
else:
self._installed = self._system.test_library("Xm", ["Xm/Xm.h"])
|
#!/usr/bin/env python
#
# Xm
#
# The xm library is hard to find and requires this special code.
#
# Author P G Jones - 11/07/2012 <p.g.jones@qmul.ac.uk> : First revision
# Author P G Jones - 22/09/2012 <p.g.jones@qmul.ac.uk> : Major refactor of snoing.
####################################################################################################
import systempackage
import system
import os
class Xm(systempackage.SystemPackage):
""" Package for the Open Motif/Xm library."""
def __init__(self, system):
super(Xm, self).__init__("Xm", system, "Install Xm-dev (OpenMotif) on this system.")
def check_state(self):
""" Check the Xm state, slightly more involved on macs."""
if self._system.get_os_type() == system.System.Mac:
if os.path.exists("/sw/include/Xm"):
flags = [ "-I%s" % "/sw/include", "-L%s" % "/sw/lib" ]
elif os.path.exists("/usr/OpenMotif"):
flags = [ "-I%s" % "/usr/OpenMotif/include", "-L%s" % "/usr/OpenMotif/lib" ]
self._installed = self._system._test_compile(["Xm/Xm.h"], flags)
else:
self._installed = self._system.test_library("Xm", ["Xm/Xm.h"])
|
Fix Xm library location error.
|
Fix Xm library location error.
|
Python
|
mit
|
mjmottram/snoing,mjmottram/snoing
|
---
+++
@@ -19,9 +19,9 @@
""" Check the Xm state, slightly more involved on macs."""
if self._system.get_os_type() == system.System.Mac:
if os.path.exists("/sw/include/Xm"):
- flags = [ "-I%s" % "/sw/include/Xm", "-L%s" % "/sw/lib" ]
+ flags = [ "-I%s" % "/sw/include", "-L%s" % "/sw/lib" ]
elif os.path.exists("/usr/OpenMotif"):
flags = [ "-I%s" % "/usr/OpenMotif/include", "-L%s" % "/usr/OpenMotif/lib" ]
- self._installed = self._system._test_compile(["Xm.h"], flags)
+ self._installed = self._system._test_compile(["Xm/Xm.h"], flags)
else:
self._installed = self._system.test_library("Xm", ["Xm/Xm.h"])
|
026aa257bff85b897e8e3ef1999b8fc6f7e3cc30
|
socketdjango/socketdjango/__init__.py
|
socketdjango/socketdjango/__init__.py
|
"""
Socketdjango Project Module
Interesting Docstring goes here!
"""
__version__ = '0.0.1'
|
"""
Socketdjango Project Module
Interesting Docstring goes here!
"""
__version__ = '0.1.0'
|
Change Initial Version Number to '0.1.0'
|
Change Initial Version Number to '0.1.0'
Change __version__ to '0.1.0'
|
Python
|
mit
|
bobbyrussell/django-socketio,bobbyrussell/django-socketio,bobbyrussell/django-socketio
|
---
+++
@@ -4,4 +4,4 @@
Interesting Docstring goes here!
"""
-__version__ = '0.0.1'
+__version__ = '0.1.0'
|
502a5cb7179aaedf68f3f16bf8d2ef7eb1ad0032
|
nsq/sockets/__init__.py
|
nsq/sockets/__init__.py
|
'''Sockets that wrap different connection types'''
# Not all platforms support all types of sockets provided here. For those that
# are not available, the corresponding socket wrapper is imported as None.
from .. import logger
# Snappy support
try:
from .snappy import SnappySocket
except ImportError: # pragma: no cover
logger.warn('Snappy compression not supported')
SnappySocket = None
# Deflate support
try:
from .deflate import DeflateSocket
except ImportError: # pragma: no cover
logger.warn('Deflate compression not supported')
DeflateSocket = None
# The TLS socket
try:
from .tls import TLSSocket
except ImportError: # pragma: no cover
logger.warn('TLS not supported')
TLSSocket = None
|
'''Sockets that wrap different connection types'''
# Not all platforms support all types of sockets provided here. For those that
# are not available, the corresponding socket wrapper is imported as None.
from .. import logger
# Snappy support
try:
from .snappy import SnappySocket
except ImportError: # pragma: no cover
logger.debug('Snappy compression not supported')
SnappySocket = None
# Deflate support
try:
from .deflate import DeflateSocket
except ImportError: # pragma: no cover
logger.debug('Deflate compression not supported')
DeflateSocket = None
# The TLS socket
try:
from .tls import TLSSocket
except ImportError: # pragma: no cover
logger.warn('TLS not supported')
TLSSocket = None
|
Reduce log severity of socket import messages
|
Reduce log severity of socket import messages
|
Python
|
mit
|
dlecocq/nsq-py,dlecocq/nsq-py
|
---
+++
@@ -9,7 +9,7 @@
try:
from .snappy import SnappySocket
except ImportError: # pragma: no cover
- logger.warn('Snappy compression not supported')
+ logger.debug('Snappy compression not supported')
SnappySocket = None
@@ -17,7 +17,7 @@
try:
from .deflate import DeflateSocket
except ImportError: # pragma: no cover
- logger.warn('Deflate compression not supported')
+ logger.debug('Deflate compression not supported')
DeflateSocket = None
|
7dfe9c435b102eacddd9e0617540495f0af46416
|
app/config.py
|
app/config.py
|
import os
if os.environ['DATABASE_URL'] is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db'
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
|
import os
if os.environ.get('DATABASE_URL') is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db'
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
SQLALCHEMY_TRACK_MODIFICATIONS = False # supress deprecation warning
|
Fix the SQLite URL problem
|
Fix the SQLite URL problem
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
---
+++
@@ -1,7 +1,7 @@
import os
-if os.environ['DATABASE_URL'] is None:
+if os.environ.get('DATABASE_URL') is None:
SQLALCHEMY_DATABASE_URI = 'sqlite:///meetup.db'
else:
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
|
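A short sketch of the behaviour the fix above depends on: indexing os.environ raises KeyError when the variable is unset, while os.environ.get() returns None or a supplied default.
import os

os.environ.pop('DATABASE_URL', None)               # make sure the variable is unset
print(os.environ.get('DATABASE_URL'))              # None, no exception
try:
    os.environ['DATABASE_URL']
except KeyError:
    print('direct indexing raises KeyError when the variable is missing')
database_uri = os.environ.get('DATABASE_URL', 'sqlite:///fallback.db')
print(database_uri)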
28e0a10925d866572cae86507a3ace845fbff6a9
|
observers/middleware.py
|
observers/middleware.py
|
from .models import Observer
class ObserverMiddleware(object):
"""
Attaches an observer instance to every request coming from an
authenticated user.
"""
def process_request(self, request):
assert hasattr(request, 'user'), "ObserverMiddleware requires auth middleware to be installed."
if request.user and request.user.is_authenticated():
request.observer = Observer.objects.get(user=request.user)
else:
request.observer = None
|
from .models import Observer
class ObserverMiddleware(object):
"""
Attaches an observer instance to every request coming from an
authenticated user.
"""
def process_request(self, request):
assert hasattr(request, 'user'), "ObserverMiddleware requires auth middleware to be installed."
if request.user and request.user.is_authenticated:
request.observer = Observer.objects.get(user=request.user)
else:
request.observer = None
|
Use is_authenticated as a property.
|
Use is_authenticated as a property.
|
Python
|
mit
|
zsiciarz/variablestars.net,zsiciarz/variablestars.net,zsiciarz/variablestars.net
|
---
+++
@@ -8,7 +8,7 @@
"""
def process_request(self, request):
assert hasattr(request, 'user'), "ObserverMiddleware requires auth middleware to be installed."
- if request.user and request.user.is_authenticated():
+ if request.user and request.user.is_authenticated:
request.observer = Observer.objects.get(user=request.user)
else:
request.observer = None
|
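A hedged sketch of the attribute-style check: on recent Django releases is_authenticated is read as an attribute rather than called, with the callable form deprecated and later removed (exact versions depend on the release); _User below is a hypothetical stand-in for request.user.
class _User(object):
    is_authenticated = True   # attribute-style flag, as on recent Django user models

def observer_exists(user):
    return bool(user and user.is_authenticated)

print(observer_exists(_User()))   # True
print(observer_exists(None))      # False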
a50a46ee26e5d7d325a228559bc701c86d1b392d
|
arg-reader.py
|
arg-reader.py
|
#!/usr/bin/env python3
# References:
# http://docs.python.org/3.3/library/argparse.html?highlight=argparse#argparse
# http://bip.weizmann.ac.il/course/python/PyMOTW/PyMOTW/docs/argparse/index.html
import argparse
def main():
'''
Read arguments from a file
'''
parser = argparse.ArgumentParser(description='Script reads arguments from a file. Type $ ./arg-reader.py @argsfilename e.g. $ ./arg-reader.py @args.txt',
fromfile_prefix_chars='@',
)
parser.add_argument('-animalbig', action="store", dest="animalbig",
help = 'name of a big animal')
parser.add_argument('-animalsmall', action="store", dest="animalsmall",
help = 'name of a small animal')
arguments = parser.parse_args()
print(arguments)
print(arguments.animalbig)
print(arguments.animalsmall)
if __name__ == "__main__": main()
|
#!/usr/bin/env python3
# References:
# http://docs.python.org/3.3/library/argparse.html?highlight=argparse#argparse
# http://bip.weizmann.ac.il/course/python/PyMOTW/PyMOTW/docs/argparse/index.html
import argparse
def main():
'''
For help, use argument -h
$ ./arg-reader.py -h
To specify an argument, prefix with -
$ ./arg-reader.py -animalbig hippo -animalsmall fly
To read arguments from a file, prefix file name with @
$ ./arg-reader.py @args2.txt
To specify arguments from command line and from a file
$ ./arg-reader.py @args.txt -animalbig hippo
'''
parser = argparse.ArgumentParser(description='To read arguments from a file, prefix file name with @ e.g. $ ./arg-reader.py @args.txt -animalbig hippo',
fromfile_prefix_chars='@',
)
parser.add_argument('-animalbig', action="store", dest="animalbig",
help = 'name of a big animal')
parser.add_argument('-animalsmall', action="store", dest="animalsmall",
help = 'name of a small animal')
arguments = parser.parse_args()
print(arguments)
print(arguments.animalbig)
print(arguments.animalsmall)
if __name__ == "__main__": main()
|
Add more comments about usage.
|
Add more comments about usage.
|
Python
|
mit
|
beepscore/argparse
|
---
+++
@@ -8,10 +8,17 @@
def main():
'''
- Read arguments from a file
+ For help, use argument -h
+ $ ./arg-reader.py -h
+ To specify an argument, prefix with -
+ $ ./arg-reader.py -animalbig hippo -animalsmall fly
+ To read arguments from a file, prefix file name with @
+ $ ./arg-reader.py @args2.txt
+ To specify arguments from command line and from a file
+ $ ./arg-reader.py @args.txt -animalbig hippo
'''
- parser = argparse.ArgumentParser(description='Script reads arguments from a file. Type $ ./arg-reader.py @argsfilename e.g. $ ./arg-reader.py @args.txt',
+ parser = argparse.ArgumentParser(description='To read arguments from a file, prefix file name with @ e.g. $ ./arg-reader.py @args.txt -animalbig hippo',
fromfile_prefix_chars='@',
)
|
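A self-contained sketch of fromfile_prefix_chars: with '@' configured, an argument such as @args.txt is expanded from that file (one argument per line by default); the flag names and file name are illustrative.
import argparse

parser = argparse.ArgumentParser(
    description='Arguments may come from the command line or from an @file',
    fromfile_prefix_chars='@',
)
parser.add_argument('-animalbig', dest='animalbig', help='name of a big animal')
parser.add_argument('-animalsmall', dest='animalsmall', help='name of a small animal')

# Same effect as: prog @args.txt   where args.txt lists one argument per line
args = parser.parse_args(['-animalbig', 'hippo', '-animalsmall', 'fly'])
print(args.animalbig, args.animalsmall)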
da05390fa11a12d0491caff18d38e71a1e134b82
|
spicedham/sqlalchemywrapper/models.py
|
spicedham/sqlalchemywrapper/models.py
|
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import UniqueConstraint
Base = declarative_base()
class Store(Base):
__tablename__ = 'store'
id = Column(Integer, primary_key=True, autoincrement=True)
key = Column(String)
tag = Column(String)
value = Column(String)
__table_args__ = (UniqueConstraint('key', 'tag'),)
def __unicode__(self):
return unicode(key)
|
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import PrimaryKeyConstraint
Base = declarative_base()
class Store(Base):
__tablename__ = 'store'
key = Column(String)
tag = Column(String)
value = Column(String)
__table_args__ = (PrimaryKeyConstraint('key', 'tag'),)
def __unicode__(self):
return unicode(key)
|
Make tag and key be a composite primary key
|
Make tag and key be a composite primary key
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
---
+++
@@ -1,16 +1,15 @@
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.schema import UniqueConstraint
+from sqlalchemy.schema import PrimaryKeyConstraint
Base = declarative_base()
class Store(Base):
__tablename__ = 'store'
- id = Column(Integer, primary_key=True, autoincrement=True)
key = Column(String)
tag = Column(String)
value = Column(String)
- __table_args__ = (UniqueConstraint('key', 'tag'),)
+ __table_args__ = (PrimaryKeyConstraint('key', 'tag'),)
def __unicode__(self):
return unicode(key)
|
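A runnable sketch of the composite primary key pattern above, using an in-memory SQLite engine purely for illustration; marking both columns primary_key=True would declare the same composite key.
from sqlalchemy import Column, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.schema import PrimaryKeyConstraint

Base = declarative_base()

class Store(Base):
    __tablename__ = 'store'
    key = Column(String)
    tag = Column(String)
    value = Column(String)
    # (key, tag) together form the primary key, so the pair must be unique
    __table_args__ = (PrimaryKeyConstraint('key', 'tag'),)

engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)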
63d1eb69fc614cb3f019e7b37dd4ec10896c644e
|
chartflo/views.py
|
chartflo/views.py
|
# -*- coding: utf-8 -*-
from django.views.generic import TemplateView
from chartflo.factory import ChartDataPack
class ChartsView(TemplateView):
template_name = 'chartflo/charts.html'
graph_type = "pie"
title = ""
def get_data(self):
return {}
def get_context_data(self, **kwargs):
context = super(ChartsView, self).get_context_data(**kwargs)
# get data
P = ChartDataPack()
dataset = self.get_data()
# package the data
datapack = P.package("chart_id", self.title, dataset)
# options
datapack['legend'] = True
datapack['export'] = False
context['datapack'] = datapack
context["graph_type"] = self.graph_type
context["title"] = context["label"] = self.title
context["chart_url"] = self._get_chart_url()
return context
def _get_chart_url(self):
url = "chartflo/charts/" + self.graph_type + ".html"
return url
|
# -*- coding: utf-8 -*-
from django.views.generic import TemplateView
from chartflo.factory import ChartDataPack
class ChartsView(TemplateView):
template_name = 'chartflo/charts.html'
chart_type = "pie"
title = ""
def get_data(self):
return {}
def get_context_data(self, **kwargs):
context = super(ChartsView, self).get_context_data(**kwargs)
# get data
P = ChartDataPack()
dataset = self.get_data()
# package the data
datapack = P.package("chart_id", self.title, dataset)
# options
datapack['legend'] = True
datapack['export'] = False
context['datapack'] = datapack
context["title"] = context["label"] = self.title
context["chart_url"] = self._get_chart_url()
return context
def _get_chart_url(self):
url = "chartflo/charts/" + self.chart_type + ".html"
return url
|
Change graph_type to chart_type and remove it from context
|
Change graph_type to chart_type and remove it from context
|
Python
|
mit
|
synw/django-chartflo,synw/django-chartflo,synw/django-chartflo
|
---
+++
@@ -6,7 +6,7 @@
class ChartsView(TemplateView):
template_name = 'chartflo/charts.html'
- graph_type = "pie"
+ chart_type = "pie"
title = ""
def get_data(self):
@@ -23,11 +23,10 @@
datapack['legend'] = True
datapack['export'] = False
context['datapack'] = datapack
- context["graph_type"] = self.graph_type
context["title"] = context["label"] = self.title
context["chart_url"] = self._get_chart_url()
return context
def _get_chart_url(self):
- url = "chartflo/charts/" + self.graph_type + ".html"
+ url = "chartflo/charts/" + self.chart_type + ".html"
return url
|
e66468faaf9c4885f13545329baa20fe4914f49c
|
historia.py
|
historia.py
|
from eve import Eve
from eve_swagger import swagger
from eve.auth import BasicAuth
from config import *
from hashlib import md5
class MyBasicAuth(BasicAuth):
def check_auth(self, username, password, allowed_roles, resource,
method):
accounts = app.data.driver.db['accounts']
account = accounts.find_one({'username': username})
return account and password == account['password']
def set_reporter(request, lookup):
print request
app = Eve(auth=MyBasicAuth)
app.on_pre_PUT_event += set_reporter
app.register_blueprint(swagger)
app.config['SWAGGER_INFO'] = SWAGGER_INFO
app.config['SWAGGER_HOST'] = SWAGGER_HOST
if __name__ == '__main__':
app.run(host=LISTEN_IP, port=LISTEN_PORT)
|
from eve import Eve
from eve_swagger import swagger
from eve.auth import BasicAuth
from config import *
from hashlib import md5
class MyBasicAuth(BasicAuth):
def check_auth(self, username, password, allowed_roles, resource,
method):
accounts = app.data.driver.db['accounts']
account = accounts.find_one({'username': username})
return account and md5(password).hexdigest() == account['password']
def set_reporter(request, lookup):
print request
app = Eve(auth=MyBasicAuth)
app.on_pre_PUT_event += set_reporter
app.register_blueprint(swagger)
app.config['SWAGGER_INFO'] = SWAGGER_INFO
app.config['SWAGGER_HOST'] = SWAGGER_HOST
if __name__ == '__main__':
app.run(host=LISTEN_IP, port=LISTEN_PORT)
|
Use MD5 to encode passwords
|
Use MD5 to encode passwords
|
Python
|
mit
|
waoliveros/historia
|
---
+++
@@ -9,11 +9,11 @@
method):
accounts = app.data.driver.db['accounts']
account = accounts.find_one({'username': username})
- return account and password == account['password']
+ return account and md5(password).hexdigest() == account['password']
def set_reporter(request, lookup):
print request
-
+
app = Eve(auth=MyBasicAuth)
app.on_pre_PUT_event += set_reporter
|
4f9e51ff45f6faf6d0be6a442b4b04c3301026fe
|
cloudenvy/commands/envy_snapshot.py
|
cloudenvy/commands/envy_snapshot.py
|
from cloudenvy.envy import Envy
class EnvySnapshot(object):
"""Create a snapshot of an ENVy."""
def __init__(self, argparser):
self._build_subparser(argparser)
def _build_subparser(self, subparsers):
subparser = subparsers.add_parser('snapshot', help='snapshot help')
subparser.set_defaults(func=self.run)
return subparser
#TODO(jakedahn): The entire UX for this needs to be talked about, refer to
# https://github.com/bcwaldon/cloudenvy/issues/27 for any
# discussion, if you're curious.
def run(self, config, args):
envy = Envy(config)
envy.snapshot('%s-snapshot' % envy.name)
|
from cloudenvy.envy import Envy
class EnvySnapshot(object):
"""Create a snapshot of an ENVy."""
def __init__(self, argparser):
self._build_subparser(argparser)
def _build_subparser(self, subparsers):
subparser = subparsers.add_parser('snapshot', help='snapshot help')
subparser.set_defaults(func=self.run)
subparser.add_argument('-n', '--name', action='store', default='',
help='Specify custom name for an ENVy.')
return subparser
#TODO(jakedahn): The entire UX for this needs to be talked about, refer to
# https://github.com/bcwaldon/cloudenvy/issues/27 for any
# discussion, if you're curious.
def run(self, config, args):
envy = Envy(config)
envy.snapshot('%s-snapshot' % envy.name)
|
Add missing --name flag to 'envy snapshot'
|
Add missing --name flag to 'envy snapshot'
|
Python
|
apache-2.0
|
cloudenvy/cloudenvy
|
---
+++
@@ -11,6 +11,9 @@
subparser = subparsers.add_parser('snapshot', help='snapshot help')
subparser.set_defaults(func=self.run)
+ subparser.add_argument('-n', '--name', action='store', default='',
+ help='Specify custom name for an ENVy.')
+
return subparser
#TODO(jakedahn): The entire UX for this needs to be talked about, refer to
|
68724546ba4f6063559ba14b8625c7e7ecdf9732
|
src/read_key.py
|
src/read_key.py
|
#!/usr/bin/python
def readKey(keyFileName):
return open("../options-and-settings/api-keys/" + keyFileName, "r").readline()
|
#!/usr/bin/python
def readKey(keyFileName):
return open("../options-and-settings/api-keys/" + keyFileName, "r").readline().rstrip('\n').rstrip('\r')
|
Remove newline and carriage return characters from key files so that API calls work
|
Remove newline and carriage return characters from key files so that API calls work
|
Python
|
mit
|
nilnullzip/StalkerBot,nilnullzip/StalkerBot
|
---
+++
@@ -1,5 +1,5 @@
#!/usr/bin/python
def readKey(keyFileName):
- return open("../options-and-settings/api-keys/" + keyFileName, "r").readline()
+ return open("../options-and-settings/api-keys/" + keyFileName, "r").readline().rstrip('\n').rstrip('\r')
|
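A tiny sketch of the trailing-newline handling above; a single rstrip('\r\n') strips any mix of trailing CR and LF characters, equivalent here to the chained form.
line = 'abc123-key\r\n'
print(repr(line.rstrip('\n').rstrip('\r')))   # 'abc123-key' (chained form used above)
print(repr(line.rstrip('\r\n')))              # 'abc123-key' (single call)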
b362d4b898493a856a810880079d3f44fe7d5d41
|
project/members/tests/test_application.py
|
project/members/tests/test_application.py
|
# -*- coding: utf-8 -*-
import pytest
from django.core.urlresolvers import reverse
from members.tests.fixtures.memberlikes import MembershipApplicationFactory
from members.tests.fixtures.types import MemberTypeFactory
from members.models import Member
@pytest.mark.django_db
def test_application_approve():
mtypes = [MemberTypeFactory(label='Normal member')]
application = MembershipApplicationFactory()
email = application.email
application.approve(set_mtypes=mtypes)
Member.objects.get(email=email)
@pytest.mark.django_db
def test_get_application_form(client):
response = client.get(reverse('members-apply'))
assert b'Apply for membership' in response.content
# TODO: Figure out a good way to submitting the form
|
# -*- coding: utf-8 -*-
import pytest
from django.core.urlresolvers import reverse
from members.tests.fixtures.memberlikes import MembershipApplicationFactory, MemberFactory
from members.tests.fixtures.types import MemberTypeFactory
from members.models import Member
@pytest.mark.django_db
def test_application_approve():
mtypes = [MemberTypeFactory(label='Normal member')]
application = MembershipApplicationFactory()
email = application.email
application.approve(set_mtypes=mtypes)
Member.objects.get(email=email)
@pytest.mark.django_db
def test_get_application_form(client):
response = client.get(reverse('members-apply'))
assert b'Apply for membership' in response.content
# TODO: Figure out a good way to submitting the form
@pytest.mark.django_db
def test_get_admin_members_list(admin_client):
# Create a test member
member = MemberFactory()
response = admin_client.get('/admin/members/member/')
assert member.email in response.content.decode('utf-8')
@pytest.mark.django_db
def test_get_admin_applications_list(admin_client):
application = MembershipApplicationFactory()
response = admin_client.get('/admin/members/membershipapplication/')
assert application.email in response.content.decode('utf-8')
|
Add quick admin-site tests too
|
Add quick admin-site tests too
|
Python
|
mit
|
jautero/asylum,jautero/asylum,rambo/asylum,hacklab-fi/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,rambo/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,jautero/asylum,rambo/asylum,rambo/asylum,hacklab-fi/asylum
|
---
+++
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
import pytest
from django.core.urlresolvers import reverse
-from members.tests.fixtures.memberlikes import MembershipApplicationFactory
+from members.tests.fixtures.memberlikes import MembershipApplicationFactory, MemberFactory
from members.tests.fixtures.types import MemberTypeFactory
from members.models import Member
@@ -19,3 +19,16 @@
assert b'Apply for membership' in response.content
# TODO: Figure out a good way to submitting the form
+
+@pytest.mark.django_db
+def test_get_admin_members_list(admin_client):
+ # Create a test member
+ member = MemberFactory()
+ response = admin_client.get('/admin/members/member/')
+ assert member.email in response.content.decode('utf-8')
+
+@pytest.mark.django_db
+def test_get_admin_applications_list(admin_client):
+ application = MembershipApplicationFactory()
+ response = admin_client.get('/admin/members/membershipapplication/')
+ assert application.email in response.content.decode('utf-8')
|
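A hedged sketch using pytest-django's admin_client fixture, a test client already logged in as a superuser; it assumes pytest-django is installed and the admin app is enabled, and uses the default admin index URL.
import pytest

@pytest.mark.django_db
def test_admin_index_loads(admin_client):
    response = admin_client.get('/admin/')
    assert response.status_code == 200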
5456bee257cb36e4d1400da7e27480beadbf21fd
|
examples/arabic.py
|
examples/arabic.py
|
#!/usr/bin/env python
"""
Example using Arabic
===============
Generating a wordcloud from Arabic text
Other dependencies: bidi.algorithm, arabic_reshaper
"""
from os import path
import codecs
from wordcloud import WordCloud
import arabic_reshaper
from bidi.algorithm import get_display
d = path.dirname(__file__)
# Read the whole text.
f = codecs.open(path.join(d, 'arabicwords.txt'), 'r', 'utf-8')
# Make text readable for a non-Arabic library like wordcloud
text = arabic_reshaper.reshape(f.read())
text = get_display(text)
# Generate a word cloud image
wordcloud = WordCloud(font_path='fonts/NotoNaskhArabic/NotoNaskhArabic-Regular.ttf').generate(text)
# Export to an image
wordcloud.to_file("arabic_example.png")
|
#!/usr/bin/env python
"""
Create wordcloud with Arabic
===============
Generating a wordcloud from Arabic text
Other dependencies: bidi.algorithm, arabic_reshaper
"""
from os import path
import codecs
from wordcloud import WordCloud
import arabic_reshaper
from bidi.algorithm import get_display
d = path.dirname(__file__)
# Read the whole text.
f = codecs.open(path.join(d, 'arabicwords.txt'), 'r', 'utf-8')
# Make text readable for a non-Arabic library like wordcloud
text = arabic_reshaper.reshape(f.read())
text = get_display(text)
# Generate a word cloud image
wordcloud = WordCloud(font_path='fonts/NotoNaskhArabic/NotoNaskhArabic-Regular.ttf').generate(text)
# Export to an image
wordcloud.to_file("arabic_example.png")
|
Change the title of the example
|
Change the title of the example
|
Python
|
mit
|
amueller/word_cloud
|
---
+++
@@ -1,6 +1,6 @@
#!/usr/bin/env python
"""
-Example using Arabic
+Create wordcloud with Arabic
===============
Generating a wordcloud from Arabic text
Other dependencies: bidi.algorithm, arabic_reshaper
|
9e783b39e89e34ded032dc550bc8cc9016f1eded
|
cacheops/__init__.py
|
cacheops/__init__.py
|
VERSION = (3, 0, 1)
__version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2]))
from django.apps import AppConfig
from .simple import *
from .query import *
from .invalidation import *
from .templatetags.cacheops import *
from .transaction import install_cacheops_transaction_support
class CacheopsConfig(AppConfig):
name = 'cacheops'
def ready(self):
install_cacheops()
install_cacheops_transaction_support()
default_app_config = 'cacheops.CacheopsConfig'
|
VERSION = (3, 0, 1)
__version__ = '.'.join(map(str, VERSION if VERSION[-1] else VERSION[:2]))
from django.apps import AppConfig
from .simple import *
from .query import *
from .invalidation import *
from .templatetags.cacheops import *
from .transaction import install_cacheops_transaction_support
from .utils import debug_cache_key # noqa
class CacheopsConfig(AppConfig):
name = 'cacheops'
def ready(self):
install_cacheops()
install_cacheops_transaction_support()
default_app_config = 'cacheops.CacheopsConfig'
|
Make debug_cache_key a part of API
|
Make debug_cache_key a part of API
|
Python
|
bsd-3-clause
|
LPgenerator/django-cacheops,Suor/django-cacheops
|
---
+++
@@ -9,6 +9,7 @@
from .invalidation import *
from .templatetags.cacheops import *
from .transaction import install_cacheops_transaction_support
+from .utils import debug_cache_key # noqa
class CacheopsConfig(AppConfig):
|
843f84877d06329179f326600980eff0558e37e0
|
report_qweb_pdf_watermark/__manifest__.py
|
report_qweb_pdf_watermark/__manifest__.py
|
# © 2016 Therp BV <http://therp.nl>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Pdf watermark",
"version": "12.0.1.0.0",
"author": "Therp BV, "
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Technical Settings",
"summary": "Add watermarks to your QWEB PDF reports",
"website": "https://github.com/oca/reporting-engine",
"depends": [
'web',
],
"data": [
"demo/report.xml",
"views/ir_actions_report_xml.xml",
"views/layout_templates.xml",
],
"demo": [
"demo/report.xml"
],
"intallable": True,
'external_dependencies': {
'python': [
'PyPDF2',
],
},
}
|
# © 2016 Therp BV <http://therp.nl>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Pdf watermark",
"version": "12.0.1.0.0",
"author": "Therp BV, "
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Technical Settings",
"summary": "Add watermarks to your QWEB PDF reports",
"website": "https://github.com/oca/reporting-engine",
"depends": [
'web',
],
"data": [
"demo/report.xml",
"views/ir_actions_report_xml.xml",
"views/layout_templates.xml",
],
"demo": [
"demo/report.xml"
],
"installable": True,
'external_dependencies': {
'python': [
'PyPDF2',
],
},
}
|
Fix 'installable' syntax in manifest file
|
[FIX] Fix 'installable' syntax in manifest file
|
Python
|
agpl-3.0
|
OCA/reporting-engine,OCA/reporting-engine,OCA/reporting-engine,OCA/reporting-engine
|
---
+++
@@ -20,7 +20,7 @@
"demo": [
"demo/report.xml"
],
- "intallable": True,
+ "installable": True,
'external_dependencies': {
'python': [
'PyPDF2',
|
e35d55f46ffb9d42736ad4e57ae2a6c29838b054
|
board/tests.py
|
board/tests.py
|
from django.test import TestCase
# Create your tests here.
|
from test_plus.test import TestCase
class BoardTest(TestCase):
def test_get_board_list(self):
board_list_url = self.reverse("board:list")
self.get_check_200(board_list_url)
|
Add board list test code.
|
Add board list test code.
|
Python
|
mit
|
9XD/9XD,9XD/9XD,9XD/9XD,9XD/9XD
|
---
+++
@@ -1,3 +1,8 @@
-from django.test import TestCase
+from test_plus.test import TestCase
-# Create your tests here.
+
+class BoardTest(TestCase):
+ def test_get_board_list(self):
+ board_list_url = self.reverse("board:list")
+ self.get_check_200(board_list_url)
+
|
31fedddedc5ece0b7e68762269730e2cce110cb9
|
pnnl/models/__init__.py
|
pnnl/models/__init__.py
|
import importlib
import logging
from volttron.platform.agent import utils
_log = logging.getLogger(__name__)
utils.setup_logging()
__version__ = "0.1"
__all__ = ['Model']
class Model(object):
def __init__(self, config, **kwargs):
self.model = None
config = self.store_model_config(config)
if not config:
return
base_module = "volttron.pnnl.models."
try:
model_type = config["model_type"]
except KeyError as e:
_log.exception("Missing Model Type key: {}".format(e))
raise e
_file, model_type = model_type.split(".")
module = importlib.import_module(base_module + _file)
self.model_class = getattr(module, model_type)
def get_q(self, _set, sched_index, market_index, occupied):
q = self.model.predict(_set, sched_index, market_index, occupied)
return q
def store_model_config(self, _config):
try:
config = self.vip.config.get("model")
except KeyError:
config = {}
try:
self.vip.config.set("model", _config, send_update=False)
except RuntimeError:
_log.debug("Cannot change config store on config callback!")
_config.update(config)
return _config
|
import importlib
import logging
from volttron.platform.agent import utils
_log = logging.getLogger(__name__)
utils.setup_logging()
__version__ = "0.1"
__all__ = ['Model']
class Model(object):
def __init__(self, config, **kwargs):
self.model = None
config = self.store_model_config(config)
if not config:
return
base_module = "volttron.pnnl.models."
try:
model_type = config["model_type"]
except KeyError as e:
_log.exception("Missing Model Type key: {}".format(e))
raise e
_file, model_type = model_type.split(".")
module = importlib.import_module(base_module + _file)
self.model_class = getattr(module, model_type)
self.model = self.model_class(config, self)
def get_q(self, _set, sched_index, market_index, occupied):
q = self.model.predict(_set, sched_index, market_index, occupied)
return q
def store_model_config(self, _config):
try:
config = self.vip.config.get("model")
except KeyError:
config = {}
try:
self.vip.config.set("model", _config, send_update=False)
except RuntimeError:
_log.debug("Cannot change config store on config callback!")
_config.update(config)
return _config
|
Fix self.model is not set.
|
Fix self.model is not set.
|
Python
|
bsd-3-clause
|
VOLTTRON/volttron-applications,VOLTTRON/volttron-applications,VOLTTRON/volttron-applications,VOLTTRON/volttron-applications,VOLTTRON/volttron-applications
|
---
+++
@@ -24,6 +24,7 @@
_file, model_type = model_type.split(".")
module = importlib.import_module(base_module + _file)
self.model_class = getattr(module, model_type)
+ self.model = self.model_class(config, self)
def get_q(self, _set, sched_index, market_index, occupied):
q = self.model.predict(_set, sched_index, market_index, occupied)
|
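A generic sketch of the importlib + getattr pattern used above to resolve a class from a dotted "module.ClassName" string; the example resolves a standard-library class so it runs anywhere.
import importlib

def load_class(dotted_path):
    module_name, class_name = dotted_path.rsplit('.', 1)
    module = importlib.import_module(module_name)
    return getattr(module, class_name)

decoder_cls = load_class('json.JSONDecoder')
print(decoder_cls().decode('{"a": 1}'))   # {'a': 1}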
3d2b4536803df4a202d8c1c9b5d0e689f1053378
|
tests/config.py
|
tests/config.py
|
import sys
sys.path.append('../ideascaly')
from ideascaly.auth import AuthNonSSO
from ideascaly.api import API
import unittest
testing_community = 'fiveheads.ideascale.com'
testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a'
class IdeascalyTestCase(unittest.TestCase):
def setUp(self):
self.auth = create_auth()
self.api = API(self.auth)
self.api.community_url = testing_community
def create_auth():
auth = AuthNonSSO(testing_token)
return auth
|
import os
import sys
import unittest
sys.path.append('../ideascaly')
from ideascaly.auth import AuthNonSSO
from ideascaly.api import API
testing_community = 'fiveheads.ideascale.com'
testing_token = os.environ.get('TOKEN', '')
class IdeascalyTestCase(unittest.TestCase):
def setUp(self):
self.auth = create_auth()
self.api = API(self.auth)
self.api.community_url = testing_community
def create_auth():
auth = AuthNonSSO(testing_token)
return auth
|
Read token from environment variable
|
Read token from environment variable
|
Python
|
mit
|
joausaga/ideascaly
|
---
+++
@@ -1,13 +1,13 @@
+import os
import sys
+import unittest
sys.path.append('../ideascaly')
from ideascaly.auth import AuthNonSSO
from ideascaly.api import API
-import unittest
-
testing_community = 'fiveheads.ideascale.com'
-testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a'
+testing_token = os.environ.get('TOKEN', '')
class IdeascalyTestCase(unittest.TestCase):
|
7845e017b264a38472d0dc94988a0afe6938132f
|
tests/acceptance/conftest.py
|
tests/acceptance/conftest.py
|
# -*- coding: utf-8 -*-
import mock
import pytest
@pytest.fixture
def default_trace_id_generator(dummy_request):
return lambda dummy_request: '17133d482ba4f605'
@pytest.fixture
def settings():
return {
'zipkin.tracing_percent': 100,
'zipkin.trace_id_generator': default_trace_id_generator,
}
@pytest.fixture
def get_span():
return {
'id': '1',
'tags': {
'http.uri': '/sample',
'http.uri.qs': '/sample',
'http.route': '/sample',
'response_status_code': '200',
},
'name': 'GET /sample',
'traceId': '17133d482ba4f605',
'localEndpoint': {
'ipv4': '127.0.0.1',
'port': 80,
'serviceName': 'acceptance_service',
},
'kind': 'SERVER',
'timestamp': mock.ANY,
'duration': mock.ANY,
}
|
# -*- coding: utf-8 -*-
import mock
import pytest
@pytest.fixture
def default_trace_id_generator(dummy_request):
return lambda dummy_request: '17133d482ba4f605'
@pytest.fixture
def settings():
return {
'zipkin.tracing_percent': 100,
'zipkin.trace_id_generator': default_trace_id_generator,
}
@pytest.fixture
def get_span():
return {
'id': '1',
'tags': {
'http.uri': '/sample',
'http.uri.qs': '/sample',
'http.route': '/sample',
'response_status_code': '200',
},
'name': 'GET /sample',
'traceId': '17133d482ba4f605',
'localEndpoint': {
'ipv4': mock.ANY,
'port': 80,
'serviceName': 'acceptance_service',
},
'kind': 'SERVER',
'timestamp': mock.ANY,
'duration': mock.ANY,
}
|
Allow any IP in the get_span expected span since it's not deterministic
|
Allow any IP in the get_span expected span since it's not deterministic
|
Python
|
apache-2.0
|
Yelp/pyramid_zipkin
|
---
+++
@@ -29,7 +29,7 @@
'name': 'GET /sample',
'traceId': '17133d482ba4f605',
'localEndpoint': {
- 'ipv4': '127.0.0.1',
+ 'ipv4': mock.ANY,
'port': 80,
'serviceName': 'acceptance_service',
},
|
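A small sketch of how mock.ANY compares equal to anything, which lets an expected structure pin only its deterministic fields; it uses the standard-library unittest.mock, while the test above imports the standalone mock package.
from unittest import mock

expected = {'ipv4': mock.ANY, 'port': 80}
actual = {'ipv4': '10.0.0.7', 'port': 80}
assert actual == expected
print('non-deterministic field matched via mock.ANY')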
c96e82caaa3fd560263c54db71772b44e9cd78d7
|
examples/upgrade_local_charm_k8s.py
|
examples/upgrade_local_charm_k8s.py
|
"""
This example:
1. Connects to the current model
2. Deploy a bundle and waits until it reports itself active
3. Destroys the units and applications
"""
from juju import jasyncio
from juju.model import Model
async def main():
model = Model()
print('Connecting to model')
# Connect to current model with current user, per Juju CLI
await model.connect()
try:
print('Deploying bundle')
applications = await model.deploy(
'./examples/k8s-local-bundle/bundle.yaml',
)
print('Waiting for active')
await model.wait_for_idle(status='active')
print("Successfully deployed!")
await applications[0].upgrade_charm(path='./examples/charms/onos.charm')
await model.wait_for_idle(status='active')
print('Removing bundle')
for application in applications:
await application.remove()
finally:
print('Disconnecting from model')
await model.disconnect()
print("Success")
if __name__ == '__main__':
jasyncio.run(main())
|
"""
This example:
1. Connects to the current model
2. Deploy a bundle and waits until it reports itself active
3. Upgrades the charm with a local path
4. Destroys the units and applications
"""
from juju import jasyncio
from juju.model import Model
async def main():
model = Model()
print('Connecting to model')
# Connect to current model with current user, per Juju CLI
await model.connect()
try:
print('Deploying bundle')
applications = await model.deploy(
'./examples/k8s-local-bundle/bundle.yaml',
)
print('Waiting for active')
await model.wait_for_idle(status='active')
print("Successfully deployed!")
local_path = './examples/charms/onos.charm'
print('Upgrading charm with %s' % local_path)
await applications[0].upgrade_charm(path=local_path)
await model.wait_for_idle(status='active')
print('Removing bundle')
for application in applications:
await application.remove()
finally:
print('Disconnecting from model')
await model.disconnect()
print("Success")
if __name__ == '__main__':
jasyncio.run(main())
|
Make the example more informative
|
Make the example more informative
|
Python
|
apache-2.0
|
juju/python-libjuju,juju/python-libjuju
|
---
+++
@@ -3,7 +3,8 @@
1. Connects to the current model
2. Deploy a bundle and waits until it reports itself active
-3. Destroys the units and applications
+3. Upgrades the charm with a local path
+4. Destroys the units and applications
"""
from juju import jasyncio
@@ -26,7 +27,9 @@
await model.wait_for_idle(status='active')
print("Successfully deployed!")
- await applications[0].upgrade_charm(path='./examples/charms/onos.charm')
+ local_path = './examples/charms/onos.charm'
+ print('Upgrading charm with %s' % local_path)
+ await applications[0].upgrade_charm(path=local_path)
await model.wait_for_idle(status='active')
|
1b9b4365a46cdbfbfe88e2f5e271ba387fe4274f
|
var_log_dieta/constants.py
|
var_log_dieta/constants.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, division
import logging
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
DATA_DIR = 'data'
DEFAULT_CONVERSIONS = {
'kg': {'g': 1000},
'l': {'ml': 1000},
} # yapf: disable
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, division
import logging
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
DATA_DIR = 'data'
DEFAULT_CONVERSIONS = {
'kg': {'g': 1000},
'l': {'ml': 1000},
'taza': {'ml': 250},
'tazon': {'ml': 350},
'vaso': {'ml': 300},
} # yapf: disable
|
Add taza, tazon and vaso global conversions
|
Add taza, tazon and vaso global conversions
|
Python
|
bsd-3-clause
|
pignacio/vld
|
---
+++
@@ -11,4 +11,7 @@
DEFAULT_CONVERSIONS = {
'kg': {'g': 1000},
'l': {'ml': 1000},
+ 'taza': {'ml': 250},
+ 'tazon': {'ml': 350},
+ 'vaso': {'ml': 300},
} # yapf: disable
|
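A short sketch of how such a conversion table is typically consumed; the convert helper below is hypothetical and not part of var_log_dieta.

DEFAULT_CONVERSIONS = {
    'kg': {'g': 1000},
    'l': {'ml': 1000},
    'taza': {'ml': 250},
    'tazon': {'ml': 350},
    'vaso': {'ml': 300},
}  # same shape as the table above


def convert(amount, unit, target):
    """Convert `amount` of `unit` into `target` via the nested mapping."""
    return amount * DEFAULT_CONVERSIONS[unit][target]


assert convert(2, 'taza', 'ml') == 500
assert convert(1.5, 'kg', 'g') == 1500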
18da33bd5524a7e9a043de90fb9b7aa78a26412d
|
addons/meme.py
|
addons/meme.py
|
import discord
import random
from discord.ext import commands
class Meme:
"""
Meme commands.
"""
def __init__(self, bot):
self.bot = bot
print('Addon "{}" loaded'.format(self.__class__.__name__))
@commands.has_permissions(kick_members=True)
@commands.command(pass_context=True, name="bam")
async def bam_member(self, ctx, user: discord.Member, *, reason=""):
"""Bams a user. Staff only."""
await self.bot.say("{} is ̶n͢ow b̕&̡.̷ 👍̡".format(self.bot.escape_name(user)))
@commands.has_permissions(kick_members=True)
@commands.command(pass_context=True, name="warm")
async def warm_member(self, ctx, user: discord.Member, *, reason=""):
"""Warms a user :3. Staff only."""
await self.bot.say("{} warmed. User is now {}°C.".format(user.mention, str(random.randint(0, 100))))
def setup(bot):
bot.add_cog(Meme(bot))
|
import discord
import random
from discord.ext import commands
class Meme:
"""
Meme commands.
"""
def __init__(self, bot):
self.bot = bot
print('Addon "{}" loaded'.format(self.__class__.__name__))
@commands.command(pass_context=True, hidden=True, name="bam")
async def bam_member(self, ctx, user: discord.Member, *, reason=""):
"""Bams a user owo"""
await self.bot.say("{} is ̶n͢ow b̕&̡.̷ 👍̡".format(self.bot.escape_name(user)))
@commands.command(pass_context=True, hidden=True, name="warm")
async def warm_member(self, ctx, user: discord.Member, *, reason=""):
"""Warms a user :3"""
await self.bot.say("{} warmed. User is now {}°C.".format(user.mention, str(random.randint(0, 100))))
def setup(bot):
bot.add_cog(Meme(bot))
|
Allow everyone to bam and warm, hide commands
|
Allow everyone to bam and warm, hide commands
|
Python
|
apache-2.0
|
916253/Kurisu-Reswitched
|
---
+++
@@ -12,16 +12,14 @@
self.bot = bot
print('Addon "{}" loaded'.format(self.__class__.__name__))
- @commands.has_permissions(kick_members=True)
- @commands.command(pass_context=True, name="bam")
+ @commands.command(pass_context=True, hidden=True, name="bam")
async def bam_member(self, ctx, user: discord.Member, *, reason=""):
- """Bams a user. Staff only."""
+ """Bams a user owo"""
await self.bot.say("{} is ̶n͢ow b̕&̡.̷ 👍̡".format(self.bot.escape_name(user)))
- @commands.has_permissions(kick_members=True)
- @commands.command(pass_context=True, name="warm")
+ @commands.command(pass_context=True, hidden=True, name="warm")
async def warm_member(self, ctx, user: discord.Member, *, reason=""):
- """Warms a user :3. Staff only."""
+ """Warms a user :3"""
await self.bot.say("{} warmed. User is now {}°C.".format(user.mention, str(random.randint(0, 100))))
|
b82fc6f21245cba7fadb35a6676433f015aad516
|
tripleo_common/utils/tarball.py
|
tripleo_common/utils/tarball.py
|
# Copyright 2016 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from oslo_concurrency import processutils
LOG = logging.getLogger(__name__)
def create_tarball(directory, filename, options='-czf'):
"""Create a tarball of a directory."""
LOG.debug('Creating tarball of %s at location %s' % (directory, filename))
processutils.execute('/usr/bin/tar', '-C', directory, options, filename,
'--exclude', '.git', '--exclude', '.tox', '.')
def tarball_extract_to_swift_container(object_client, filename, container):
LOG.debug('Uploading filename %s to Swift container %s' % (filename,
container))
with open(filename, 'r') as f:
object_client.put_object(
container=container,
obj='',
contents=f,
query_string='extract-archive=tar.gz',
headers={'X-Detect-Content-Type': 'true'}
)
|
# Copyright 2016 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from oslo_concurrency import processutils
LOG = logging.getLogger(__name__)
def create_tarball(directory, filename, options='-czf'):
"""Create a tarball of a directory."""
LOG.debug('Creating tarball of %s at location %s' % (directory, filename))
processutils.execute('/usr/bin/tar', '-C', directory, options, filename,
'--exclude', '.git', '--exclude', '.tox',
'--exclude', '*.pyc', '--exclude', '*.pyo', '.')
def tarball_extract_to_swift_container(object_client, filename, container):
LOG.debug('Uploading filename %s to Swift container %s' % (filename,
container))
with open(filename, 'r') as f:
object_client.put_object(
container=container,
obj='',
contents=f,
query_string='extract-archive=tar.gz',
headers={'X-Detect-Content-Type': 'true'}
)
|
Exclude more unneeded files from default plan
|
Exclude more unneeded files from default plan
This patch excludes more file types from the tarball uploaded to Swift as
the default deployment plan.
Change-Id: I8b6d8de8d7662604cdb871fa6a4fb872c7937e25
Closes-Bug: #1613286
|
Python
|
apache-2.0
|
openstack/tripleo-common,openstack/tripleo-common
|
---
+++
@@ -23,7 +23,8 @@
"""Create a tarball of a directory."""
LOG.debug('Creating tarball of %s at location %s' % (directory, filename))
processutils.execute('/usr/bin/tar', '-C', directory, options, filename,
- '--exclude', '.git', '--exclude', '.tox', '.')
+ '--exclude', '.git', '--exclude', '.tox',
+ '--exclude', '*.pyc', '--exclude', '*.pyo', '.')
def tarball_extract_to_swift_container(object_client, filename, container):
|
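For comparison, the same exclusions can be expressed with only the standard library; this is an illustrative alternative, not how tripleo-common implements it.

import fnmatch
import tarfile

EXCLUDE_PATTERNS = ('.git', '.tox', '*.pyc', '*.pyo')  # mirrors the tar flags


def _skip_unwanted(tarinfo):
    """Drop members whose path contains an excluded name or pattern."""
    for part in tarinfo.name.split('/'):
        if any(fnmatch.fnmatch(part, pattern) for pattern in EXCLUDE_PATTERNS):
            return None
    return tarinfo


def create_tarball(directory, filename):
    # Same effect as `tar --exclude ...`, using tarfile's filter hook.
    with tarfile.open(filename, 'w:gz') as tar:
        tar.add(directory, arcname='.', filter=_skip_unwanted)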
7ac384be36e22919a15fc7d25de25aa7afcd9382
|
statscache/consumer.py
|
statscache/consumer.py
|
import copy
import fedmsg.consumers
import logging
log = logging.getLogger("fedmsg")
class StatsConsumer(fedmsg.consumers.FedmsgConsumer):
"""
The actual 'cache' of statscache that accumulates messages to be processed.
"""
topic = '*'
config_key = 'statscache.consumer.enabled'
def __init__(self, *args, **kwargs):
""" Instantiate the consumer and a default list of buckets """
log.debug("statscache consumer initializing")
super(StatsConsumer, self).__init__(*args, **kwargs)
log.debug("statscache consumer initialized")
self.buckets = {
'OneSecond': [],
'FiveSecond': [],
'OneMinute': [],
}
def consume(self, raw_msg):
""" Receive a message and enqueue it onto each bucket """
topic, msg = raw_msg['topic'], raw_msg['body']
log.info("Got message %r", topic)
for name, bucket in self.buckets.items():
bucket.append(copy.deepcopy(msg))
def stop(self):
log.info("Cleaning up StatsConsumer.")
super(StatsConsumer, self).stop()
|
import copy
import fedmsg.consumers
import logging
log = logging.getLogger("fedmsg")
class StatsConsumer(fedmsg.consumers.FedmsgConsumer):
"""
The actual 'cache' of statscache that accumulates messages to be processed.
"""
topic = '*'
config_key = 'statscache.consumer.enabled'
def __init__(self, *args, **kwargs):
""" Instantiate the consumer and a default list of buckets """
log.debug("statscache consumer initializing")
super(StatsConsumer, self).__init__(*args, **kwargs)
log.debug("statscache consumer initialized")
self.buckets = {
'OneSecond': [],
'FiveSecond': [],
'OneMinute': [],
'OneDay': [],
}
def consume(self, raw_msg):
""" Receive a message and enqueue it onto each bucket """
topic, msg = raw_msg['topic'], raw_msg['body']
log.info("Got message %r", topic)
for name, bucket in self.buckets.items():
bucket.append(copy.deepcopy(msg))
def stop(self):
log.info("Cleaning up StatsConsumer.")
super(StatsConsumer, self).stop()
|
Create missing bucket for one-day frequency
|
Create missing bucket for one-day frequency
|
Python
|
lgpl-2.1
|
yazman/statscache,yazman/statscache,yazman/statscache
|
---
+++
@@ -21,6 +21,7 @@
'OneSecond': [],
'FiveSecond': [],
'OneMinute': [],
+ 'OneDay': [],
}
def consume(self, raw_msg):
|
8cbc55794d67571831ccc22b1ccdcf716362d814
|
tests/test_hmmsearch3.py
|
tests/test_hmmsearch3.py
|
import os
import unittest
import sys
# hack to allow tests to find inmembrane in directory above
module_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(module_dir, '..'))
import inmembrane
class TestHmmsearch3(unittest.TestCase):
def setUp(self):
self.dir = os.path.join(module_dir, 'hmmsearch3')
def test_hmmsearch3(self):
save_dir = os.getcwd()
os.chdir(self.dir)
inmembrane.silence_log(True)
self.params = inmembrane.get_params()
self.params['fasta'] = "hmmsearch3.fasta"
self.params['hmm_profiles_dir'] = "../../protocols/gram_neg_profiles"
self.seqids, self.proteins = \
inmembrane.create_protein_data_structure(self.params['fasta'])
inmembrane.hmmsearch3(self.params, self.proteins)
self.expected_output = {
u'SPy_0128': ['LPxTG'],
u'SPy_0191a': ['SLH_ls'],
}
for seqid in self.expected_output:
for motif in self.expected_output[seqid]:
self.assertTrue(motif in self.proteins[seqid]['hmmsearch'])
os.chdir(save_dir)
if __name__ == '__main__':
unittest.main()
|
import os
import unittest
import sys
# hack to allow tests to find inmembrane in directory above
module_dir = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(module_dir, '..'))
import inmembrane
class TestHmmsearch3(unittest.TestCase):
def setUp(self):
self.dir = os.path.join(module_dir, 'hmmsearch3')
def test_hmmsearch3(self):
save_dir = os.getcwd()
os.chdir(self.dir)
inmembrane.silence_log(True)
self.params = inmembrane.get_params()
self.params['fasta'] = "hmmsearch3.fasta"
self.params['hmm_profiles_dir'] = "../../protocols/gram_pos_profiles"
self.seqids, self.proteins = \
inmembrane.create_protein_data_structure(self.params['fasta'])
inmembrane.hmmsearch3(self.params, self.proteins)
self.expected_output = {
u'SPy_0128': ['LPxTG'],
u'SPy_0191a': ['SLH_ls'],
}
for seqid in self.expected_output:
for motif in self.expected_output[seqid]:
self.assertTrue(motif in self.proteins[seqid]['hmmsearch'])
os.chdir(save_dir)
if __name__ == '__main__':
unittest.main()
|
Put correct directory for profiles in test_hmmsearch
|
Put correct directory for profiles in test_hmmsearch
|
Python
|
bsd-2-clause
|
boscoh/inmembrane
|
---
+++
@@ -21,7 +21,7 @@
self.params = inmembrane.get_params()
self.params['fasta'] = "hmmsearch3.fasta"
- self.params['hmm_profiles_dir'] = "../../protocols/gram_neg_profiles"
+ self.params['hmm_profiles_dir'] = "../../protocols/gram_pos_profiles"
self.seqids, self.proteins = \
inmembrane.create_protein_data_structure(self.params['fasta'])
inmembrane.hmmsearch3(self.params, self.proteins)
|
cbadf5c564d7f5f701499409e2ae77ff90ba477c
|
tests/test_tensorflow.py
|
tests/test_tensorflow.py
|
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
op = tf.add(2, 3)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(5, result)
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
op = tf.matmul(m1, m2)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result)
|
import unittest
import numpy as np
import tensorflow as tf
from common import gpu_test
class TestTensorflow(unittest.TestCase):
def test_addition(self):
op = tf.add(2, 3)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(5, result)
def test_conv2d(self):
input = tf.random_normal([1,2,2,1])
filter = tf.random_normal([1,1,1,1])
op = tf.nn.conv2d(input, filter, strides=[1, 1, 1, 1], padding='SAME')
with tf.Session() as sess:
result = sess.run(op)
self.assertEqual(4, len(result.shape))
@gpu_test
def test_gpu(self):
with tf.device('/gpu:0'):
m1 = tf.constant([2.0, 3.0], shape=[1, 2], name='a')
m2 = tf.constant([3.0, 4.0], shape=[2, 1], name='b')
op = tf.matmul(m1, m2)
sess = tf.Session()
result = sess.run(op)
self.assertEqual(np.array(18, dtype=np.float32, ndmin=2), result)
|
Add conv2d test for tensorflow
|
Add conv2d test for tensorflow
|
Python
|
apache-2.0
|
Kaggle/docker-python,Kaggle/docker-python
|
---
+++
@@ -14,6 +14,15 @@
result = sess.run(op)
self.assertEqual(5, result)
+
+ def test_conv2d(self):
+ input = tf.random_normal([1,2,2,1])
+ filter = tf.random_normal([1,1,1,1])
+
+ op = tf.nn.conv2d(input, filter, strides=[1, 1, 1, 1], padding='SAME')
+ with tf.Session() as sess:
+ result = sess.run(op)
+ self.assertEqual(4, len(result.shape))
@gpu_test
def test_gpu(self):
|
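A quick shape check behind the new assertion, done with plain arithmetic and independent of TensorFlow: with stride 1 and SAME padding the output spatial size is ceil(input / stride), so a [1, 2, 2, 1] input keeps rank 4 and its 2x2 extent.

import math

in_h, in_w, stride = 2, 2, 1
out_h = int(math.ceil(float(in_h) / stride))  # SAME padding: ceil(input / stride)
out_w = int(math.ceil(float(in_w) / stride))
assert (out_h, out_w) == (2, 2)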
dfd4a6f6b23447538b2b22da11666f5218d791db
|
mots_vides/constants.py
|
mots_vides/constants.py
|
"""
Constants for mots-vides
"""
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
|
"""
Constants for mots-vides
"""
import os
DATA_DIRECTORY = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/'
)
LANGUAGE_CODES = {
'af': 'afrikaans',
'ar': 'arabic',
'az': 'azerbaijani',
'bg': 'bulgarian',
'be': 'belarusian',
'bn': 'bengali',
'br': 'breton',
'bs': 'bosnian',
'ca': 'catalan',
'cs': 'czech',
'cy': 'welsh',
'da': 'danish',
'de': 'german',
'el': 'greek',
'en': 'english',
'eo': 'esperanto',
'es': 'spanish',
'et': 'estonian',
'eu': 'basque',
'fa': 'persian',
'fi': 'finnish',
'fr': 'french',
'fy': 'frisian',
'ga': 'irish',
'gl': 'galician',
'he': 'hebrew',
'hi': 'hindi',
'hr': 'croatian',
'hu': 'hungarian',
'ia': 'interlingua',
'id': 'indonesian',
'io': 'ido',
'is': 'icelandic',
'it': 'italian',
'ja': 'japanese',
'ka': 'georgian',
'kk': 'kazakh',
'km': 'khmer',
'kn': 'kannada',
'ko': 'korean',
'lb': 'luxembourgish',
'lt': 'lithuanian',
'lv': 'latvian',
'mk': 'macedonian',
'ml': 'malayalam',
'mn': 'mongolian',
'mr': 'marathi',
'my': 'burmese',
'nb': 'norwegian',
'ne': 'nepali',
'nl': 'dutch',
'os': 'ossetic',
'pa': 'punjabi',
'pl': 'polish',
'pt': 'portuguese',
'ro': 'romanian',
'ru': 'russian',
'sk': 'slovak',
'sl': 'slovenian',
'sq': 'albanian',
'sr': 'serbian',
'sv': 'swedish',
'sw': 'swahili',
'ta': 'tamil',
'te': 'telugu',
'th': 'thai',
'tr': 'turkish',
'tt': 'tatar',
'uk': 'ukrainian',
'ur': 'urdu',
'vi': 'vietnamese',
'zh': 'chinese',
}
|
Define a complete list of language codes, for easy future maintenance
|
Define a complete list of language codes, for easy future maintenance
|
Python
|
bsd-3-clause
|
Fantomas42/mots-vides,Fantomas42/mots-vides
|
---
+++
@@ -8,3 +8,78 @@
os.path.abspath(__file__)),
'datas/'
)
+
+LANGUAGE_CODES = {
+ 'af': 'afrikaans',
+ 'ar': 'arabic',
+ 'az': 'azerbaijani',
+ 'bg': 'bulgarian',
+ 'be': 'belarusian',
+ 'bn': 'bengali',
+ 'br': 'breton',
+ 'bs': 'bosnian',
+ 'ca': 'catalan',
+ 'cs': 'czech',
+ 'cy': 'welsh',
+ 'da': 'danish',
+ 'de': 'german',
+ 'el': 'greek',
+ 'en': 'english',
+ 'eo': 'esperanto',
+ 'es': 'spanish',
+ 'et': 'estonian',
+ 'eu': 'basque',
+ 'fa': 'persian',
+ 'fi': 'finnish',
+ 'fr': 'french',
+ 'fy': 'frisian',
+ 'ga': 'irish',
+ 'gl': 'galician',
+ 'he': 'hebrew',
+ 'hi': 'hindi',
+ 'hr': 'croatian',
+ 'hu': 'hungarian',
+ 'ia': 'interlingua',
+ 'id': 'indonesian',
+ 'io': 'ido',
+ 'is': 'icelandic',
+ 'it': 'italian',
+ 'ja': 'japanese',
+ 'ka': 'georgian',
+ 'kk': 'kazakh',
+ 'km': 'khmer',
+ 'kn': 'kannada',
+ 'ko': 'korean',
+ 'lb': 'luxembourgish',
+ 'lt': 'lithuanian',
+ 'lv': 'latvian',
+ 'mk': 'macedonian',
+ 'ml': 'malayalam',
+ 'mn': 'mongolian',
+ 'mr': 'marathi',
+ 'my': 'burmese',
+ 'nb': 'norwegian',
+ 'ne': 'nepali',
+ 'nl': 'dutch',
+ 'os': 'ossetic',
+ 'pa': 'punjabi',
+ 'pl': 'polish',
+ 'pt': 'portuguese',
+ 'ro': 'romanian',
+ 'ru': 'russian',
+ 'sk': 'slovak',
+ 'sl': 'slovenian',
+ 'sq': 'albanian',
+ 'sr': 'serbian',
+ 'sv': 'swedish',
+ 'sw': 'swahili',
+ 'ta': 'tamil',
+ 'te': 'telugu',
+ 'th': 'thai',
+ 'tr': 'turkish',
+ 'tt': 'tatar',
+ 'uk': 'ukrainian',
+ 'ur': 'urdu',
+ 'vi': 'vietnamese',
+ 'zh': 'chinese',
+}
|
8dc6c7567f9bc94dc1b4a96b80d059f1231039bc
|
st2auth_flat_file_backend/__init__.py
|
st2auth_flat_file_backend/__init__.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flat_file import FlatFileAuthenticationBackend
__all__ = [
'FlatFileAuthenticationBackend'
]
__version__ = '0.1.0'
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from .flat_file import FlatFileAuthenticationBackend
__all__ = [
'FlatFileAuthenticationBackend'
]
__version__ = '0.1.0'
|
Fix code so it also works under Python 3.
|
Fix code so it also works under Python 3.
|
Python
|
apache-2.0
|
StackStorm/st2-auth-backend-flat-file
|
---
+++
@@ -13,7 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from flat_file import FlatFileAuthenticationBackend
+from __future__ import absolute_import
+
+from .flat_file import FlatFileAuthenticationBackend
__all__ = [
'FlatFileAuthenticationBackend'
|
4abd7baafcd982993471d5c0137d4b506ea49e8b
|
src/runcommands/util/enums.py
|
src/runcommands/util/enums.py
|
import enum
import os
import subprocess
import sys
import blessings
from .misc import isatty
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
class Terminal:
def __getattr__(self, name):
return ""
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
return self.value
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
import enum
import os
import subprocess
import sys
import blessings
from .misc import isatty
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
# XXX: Mock terminal that returns "" for all attributes
class TerminalValue:
registry = {}
@classmethod
def get(cls, name):
if name not in cls.registry:
cls.registry[name] = cls(name)
return cls.registry[name]
def __init__(self, name):
self.name = name
def __repr__(self):
return f"{self.__class__.__name__}({self.name})"
def __str__(self):
return ""
class Terminal:
def __getattr__(self, name):
return TerminalValue.get(name)
TERM = Terminal()
class Color(enum.Enum):
none = ""
reset = TERM.normal
black = TERM.black
red = TERM.red
green = TERM.green
yellow = TERM.yellow
blue = TERM.blue
magenta = TERM.magenta
cyan = TERM.cyan
white = TERM.white
def __str__(self):
return str(self.value)
class StreamOptions(enum.Enum):
"""Choices for stream handling."""
capture = "capture"
hide = "hide"
none = "none"
@property
def option(self):
return {
"capture": subprocess.PIPE,
"hide": subprocess.DEVNULL,
"none": None,
}[self.value]
|
Fix Color enum setup when TERM isn't set
|
Fix Color enum setup when TERM isn't set
The previous version of this didn't work right because all the values
were the same empty string.
This works around that by creating distinct values that evaluate to "".
Amends 94b55ead63523f7f5677989f1a4999994b205cdf
|
Python
|
mit
|
wylee/runcommands,wylee/runcommands
|
---
+++
@@ -11,10 +11,28 @@
if isatty(sys.stdout) and os.getenv("TERM"):
Terminal = blessings.Terminal
else:
+ # XXX: Mock terminal that returns "" for all attributes
+ class TerminalValue:
+ registry = {}
+
+ @classmethod
+ def get(cls, name):
+ if name not in cls.registry:
+ cls.registry[name] = cls(name)
+ return cls.registry[name]
+
+ def __init__(self, name):
+ self.name = name
+
+ def __repr__(self):
+ return f"{self.__class__.__name__}({self.name})"
+
+ def __str__(self):
+ return ""
class Terminal:
def __getattr__(self, name):
- return ""
+ return TerminalValue.get(name)
TERM = Terminal()
@@ -34,7 +52,7 @@
white = TERM.white
def __str__(self):
- return self.value
+ return str(self.value)
class StreamOptions(enum.Enum):
|
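A standalone illustration of the failure mode the commit message describes: Enum members whose values compare equal are folded into aliases, so a mock terminal that returned "" for every attribute left only one distinct Color member. The class name below is made up.

import enum


class Broken(enum.Enum):
    none = ""
    red = ""    # alias of Broken.none, not a separate member
    green = ""  # also an alias


assert Broken.red is Broken.none
assert len(list(Broken)) == 1  # only the canonical member survives iteration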
c4ee061f62e34c70cc67286ed0291423353cbcbe
|
imgur_cli/utils.py
|
imgur_cli/utils.py
|
import json
def cli_arg(*args, **kwargs):
"""Decorator for CLI args"""
def _decorator(func):
add_arg(func, *args, **kwargs)
return func
return _decorator
def add_arg(func, *args, **kwargs):
"""Bind CLI arguments a 'cmd_' format function"""
if not hasattr(func, 'arguments'):
func.arguments = []
if (args, kwargs) not in func.arguments:
# Because of the semantics of decorator composition if we just append
# to the options list positional options will appear to be backwards.
func.arguments.insert(0, (args, kwargs))
def generate_output(result, output_filename=None):
if output_filename:
with open(output_filename, 'w') as json_file:
data = json.dumps(result, json_file, indent=4, separators=(',', ': '))
json_file.write(data)
else:
print(json.dumps(result, indent=4, separators=(',', ': ')))
|
import json
def cli_arg(*args, **kwargs):
"""Decorator for CLI args"""
def _decorator(func):
add_arg(func, *args, **kwargs)
return func
return _decorator
def add_arg(func, *args, **kwargs):
"""Bind CLI arguments to a 'cmd_' format function"""
if not hasattr(func, 'arguments'):
func.arguments = []
if (args, kwargs) not in func.arguments:
# Because of the semantics of decorator composition if we just append
# to the options list positional options will appear to be backwards.
func.arguments.insert(0, (args, kwargs))
def cli_subparser(*args, **kwargs):
"""Decorator for CLI subparsers"""
def _decorator(func):
add_subparser(func, *args, **kwargs)
return func
return _decorator
def add_subparser(func, *args, **kwargs):
"""Bind CLI subparsers to a 'subparser_' format function"""
if not hasattr(func, 'subparser'):
func.subparser = args[0]
def generate_output(result, output_filename=None):
if output_filename:
with open(output_filename, 'w') as json_file:
data = json.dumps(result, json_file, indent=4, separators=(',', ': '))
json_file.write(data)
else:
print(json.dumps(result, indent=4, separators=(',', ': ')))
|
Define function and decorators for subparsers
|
Define function and decorators for subparsers
|
Python
|
mit
|
ueg1990/imgur-cli
|
---
+++
@@ -10,7 +10,7 @@
def add_arg(func, *args, **kwargs):
- """Bind CLI arguments a 'cmd_' format function"""
+ """Bind CLI arguments to a 'cmd_' format function"""
if not hasattr(func, 'arguments'):
func.arguments = []
@@ -20,6 +20,20 @@
func.arguments.insert(0, (args, kwargs))
+def cli_subparser(*args, **kwargs):
+ """Decorator for CLI subparsers"""
+ def _decorator(func):
+ add_subparser(func, *args, **kwargs)
+ return func
+ return _decorator
+
+
+def add_subparser(func, *args, **kwargs):
+ """Bind CLI subparsers to a 'subparser_' format function"""
+ if not hasattr(func, 'subparser'):
+ func.subparser = args[0]
+
+
def generate_output(result, output_filename=None):
if output_filename:
with open(output_filename, 'w') as json_file:
|
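A hypothetical usage sketch showing how the metadata attached by cli_arg could later feed an argparse parser; the command name, flags and import path are invented for illustration and assume the module above is importable as imgur_cli.utils.

import argparse

from imgur_cli.utils import cli_arg  # the decorator defined above


@cli_arg('--output', metavar='FILE', help='write JSON to FILE')
@cli_arg('album_id', help='album to fetch')
def cmd_album(args):
    """Illustrative 'cmd_'-style handler."""
    return args.album_id, args.output


parser = argparse.ArgumentParser()
for arg_args, arg_kwargs in cmd_album.arguments:
    parser.add_argument(*arg_args, **arg_kwargs)

args = parser.parse_args(['abc123', '--output', 'out.json'])
assert cmd_album(args) == ('abc123', 'out.json')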
5ff58311b6cf2dc8ad03351e818d05fca9e33e1b
|
hastexo/migrations/0010_add_user_foreign_key.py
|
hastexo/migrations/0010_add_user_foreign_key.py
|
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
import django.db.models.deletion
import logging
logger = logging.getLogger(__name__)
class Migration(migrations.Migration):
def backfill_learner(apps, schema_editor):
"""
Use the 'student_id' to link stacks to the User model.
"""
Stack = apps.get_model("hastexo", "Stack")
AnonymousUserId = apps.get_model("student", "AnonymousUserId")
for stack in Stack.objects.all():
try:
stack.learner = AnonymousUserId.objects.get(
anonymous_user_id=stack.student_id).user
stack.save(update_fields=['learner'])
except ObjectDoesNotExist:
logger.warning('Unable to link stack to user: '
f'{stack.name}')
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hastexo', '0009_add_null_true_for_key_and_password'),
]
operations = [
migrations.AddField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
]
|
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models
import django.db.models.deletion
import logging
logger = logging.getLogger(__name__)
class Migration(migrations.Migration):
def backfill_learner(apps, schema_editor):
"""
Use the 'student_id' to link stacks to the User model.
"""
Stack = apps.get_model("hastexo", "Stack")
AnonymousUserId = apps.get_model("student", "AnonymousUserId")
for stack in Stack.objects.all():
try:
stack.learner = AnonymousUserId.objects.get(
anonymous_user_id=stack.student_id).user
stack.save(update_fields=['learner'])
except ObjectDoesNotExist:
logger.warning('Unable to link stack to user: '
f'{stack.name}')
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('hastexo', '0009_add_null_true_for_key_and_password'),
]
operations = [
migrations.AddField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
migrations.AlterField(
model_name='stack',
name='learner',
field=models.ForeignKey(
db_constraint=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to=settings.AUTH_USER_MODEL),
),
]
|
Apply additional fix to add_user_foreign_key migration
|
Apply additional fix to add_user_foreign_key migration
The hack in 583fb729b1e201c830579345dca5beca4b131006 modified
0010_add_user_foreign_key in such a way that it ended up *not* setting
a database constraint when it should have.
Enable the database-enforced constraint in the right place.
Co-authored-by: Florian Haas <73262ad0334ab37227b2f7a0205f51db1e606681@cleura.com>
|
Python
|
agpl-3.0
|
hastexo/hastexo-xblock,hastexo/hastexo-xblock,hastexo/hastexo-xblock,hastexo/hastexo-xblock
|
---
+++
@@ -40,4 +40,13 @@
to=settings.AUTH_USER_MODEL),
),
migrations.RunPython(backfill_learner),
+ migrations.AlterField(
+ model_name='stack',
+ name='learner',
+ field=models.ForeignKey(
+ db_constraint=True,
+ null=True,
+ on_delete=django.db.models.deletion.PROTECT,
+ to=settings.AUTH_USER_MODEL),
+ ),
]
|
ea73cd99b6ff67d65c0784471603d8734b6b3d75
|
scripts/plot_example.py
|
scripts/plot_example.py
|
import os
import matplotlib.pyplot as plt
plt.style.use("ggplot")
plt.rcParams["figure.figsize"] = 10, 5
plt.rcParams["font.family"] = "serif"
plt.rcParams["font.size"] = 12
import pyhector
from pyhector import rcp26, rcp45, rcp60, rcp85
path = os.path.join(os.path.dirname(__file__),
'./example-plot.png')
for rcp in [rcp26, rcp45, rcp60, rcp85]:
output, _ = pyhector.run(rcp, {"core": {"endDate": 2100}})
temp = output["temperature.Tgav"]
temp = temp.loc[1850:] - temp.loc[1850:1900].mean()
temp.plot(label=rcp.name.split("_")[0])
plt.title("Global mean temperature")
plt.ylabel("°C over pre-industrial (1850-1900 mean)")
plt.legend(loc="best")
plt.savefig(path, dpi=96)
|
import os
import matplotlib.pyplot as plt
plt.style.use("ggplot")
plt.rcParams["figure.figsize"] = 10, 5
plt.rcParams["font.family"] = "serif"
plt.rcParams["font.size"] = 12
import pyhector
from pyhector import rcp26, rcp45, rcp60, rcp85
path = os.path.join(os.path.dirname(__file__),
'./example-plot.png')
for rcp in [rcp26, rcp45, rcp60, rcp85]:
output = pyhector.run(rcp, {"core": {"endDate": 2100}})
temp = output["temperature.Tgav"]
temp = temp.loc[1850:] - temp.loc[1850:1900].mean()
temp.plot(label=rcp.name.split("_")[0])
plt.title("Global mean temperature")
plt.ylabel("°C over pre-industrial (1850-1900 mean)")
plt.legend(loc="best")
plt.savefig(path, dpi=96)
|
Update example plot script with new API
|
Update example plot script with new API
|
Python
|
agpl-3.0
|
openclimatedata/pyhector,openclimatedata/pyhector,openclimatedata/pyhector
|
---
+++
@@ -13,7 +13,7 @@
'./example-plot.png')
for rcp in [rcp26, rcp45, rcp60, rcp85]:
- output, _ = pyhector.run(rcp, {"core": {"endDate": 2100}})
+ output = pyhector.run(rcp, {"core": {"endDate": 2100}})
temp = output["temperature.Tgav"]
temp = temp.loc[1850:] - temp.loc[1850:1900].mean()
temp.plot(label=rcp.name.split("_")[0])
|
7519bebe1d9d87930275858a537dcc0a0a64f007
|
tools/strip_filenames.py
|
tools/strip_filenames.py
|
#!/bin/python
import os
directory = os.listdir()
illegal_characters = "%?_'*+$!\""
tolowercase=True
for a in range(len(directory)):
newname=""
for c in directory[a]:
if c in illegal_characters:
continue
if c.isalnum() or c == '.':
newname=newname+c.lower()
print("convert {} to {}".format(directory[a],newname))
os.rename(directory[a], newname)
|
#!/bin/env python3
"""
Use only legal characters from files or current directory
Usage:
strip_filenames.py [<filename>...]
Options:
-l, --lowercase Only lowercase
-h, --help Show this screen and exit.
"""
import sys
import os
from docopt import docopt
# docopt(doc, argv=None, help=True, version=None, options_first=False))
def main():
opt = docopt(__doc__, sys.argv[1:])
    directory = opt.get("<filename>") or os.listdir()
    legal_characters = ""
    list_N010 = [str(x) for x in range(10)]
    list_alpha = [ chr(x+97) for x in range(26) ]
    list_ALPHA = [ chr(x+65) for x in range(26) ]
    legal_characters += "".join(list_N010)
    legal_characters += "".join(list_alpha)
    if not opt.get("--lowercase", False):
        legal_characters += "".join(list_ALPHA)
for a in range(len(directory)):
newname=""
for c in directory[a]:
if c not in legal_characters:
continue
newname += c
print("convert {} to {}".format(directory[a],newname))
os.rename(directory[a], newname)
if __name__ == "__main__":
main()
|
Use legal characters for stripping filenames
|
Use legal characters for stripping filenames
|
Python
|
mit
|
dgengtek/scripts,dgengtek/scripts
|
---
+++
@@ -1,16 +1,41 @@
-#!/bin/python
+#!/bin/env python3
+"""
+Use only legal characters from files or current directory
+Usage:
+ strip_filenames.py [<filename>...]
+Options:
+ -l, --lowercase Only lowercase
+ -h, --help Show this screen and exit.
+"""
+
+import sys
import os
-directory = os.listdir()
-illegal_characters = "%?_'*+$!\""
-tolowercase=True
+from docopt import docopt
-for a in range(len(directory)):
- newname=""
- for c in directory[a]:
- if c in illegal_characters:
- continue
- if c.isalnum() or c == '.':
- newname=newname+c.lower()
- print("convert {} to {}".format(directory[a],newname))
- os.rename(directory[a], newname)
+# docopt(doc, argv=None, help=True, version=None, options_first=False))
+def main():
+ opt = docopt(__doc__, sys.argv[1:])
+    directory = opt.get("<filename>") or os.listdir()
+ legal_characters = ""
+    list_N010 = [str(x) for x in range(10)]
+ list_alpha = [ chr(x+97) for x in range(26) ]
+ list_ALPHA = [ chr(x+65) for x in range(26) ]
+
+ legal_characters += "".join(list_N010)
+ legal_characters += "".join(list_alpha)
+ if not opt.get("--lowercase", False):
+        legal_characters += "".join(list_ALPHA)
+
+
+ for a in range(len(directory)):
+ newname=""
+ for c in directory[a]:
+ if c not in legal_characters:
+ continue
+ newname += c
+ print("convert {} to {}".format(directory[a],newname))
+ os.rename(directory[a], newname)
+
+if __name__ == "__main__":
+ main()
|
0fac3d59a34a861c7a826b0d1fa2f3002356e04c
|
src/shared.py
|
src/shared.py
|
# -*- coding: utf-8 -*-
import logging
import os
import queue
import threading
listening_port = 8444
send_outgoing_connections = True
listen_for_connections = True
data_directory = 'minode_data/'
source_directory = os.path.dirname(os.path.realpath(__file__))
trusted_peer = None
# trusted_peer = ('127.0.0.1', 8444)
log_level = logging.INFO
magic_bytes = b'\xe9\xbe\xb4\xd9'
protocol_version = 3
services = 3 # NODE_NETWORK, NODE_SSL
stream = 1
nonce = os.urandom(8)
user_agent = b'MiNode-v0.2.0'
timeout = 600
header_length = 24
nonce_trials_per_byte = 1000
payload_length_extra_bytes = 1000
shutting_down = False
vector_advertise_queue = queue.Queue()
address_advertise_queue = queue.Queue()
connections = set()
connections_lock = threading.Lock()
hosts = set()
core_nodes = set()
node_pool = set()
unchecked_node_pool = set()
outgoing_connections = 8
connection_limit = 150
objects = {}
objects_lock = threading.Lock()
|
# -*- coding: utf-8 -*-
import logging
import os
import queue
import threading
listening_port = 8444
send_outgoing_connections = True
listen_for_connections = True
data_directory = 'minode_data/'
source_directory = os.path.dirname(os.path.realpath(__file__))
trusted_peer = None
# trusted_peer = ('127.0.0.1', 8444)
log_level = logging.INFO
magic_bytes = b'\xe9\xbe\xb4\xd9'
protocol_version = 3
services = 3 # NODE_NETWORK, NODE_SSL
stream = 1
nonce = os.urandom(8)
user_agent = b'/MiNode:0.2.1/'
timeout = 600
header_length = 24
nonce_trials_per_byte = 1000
payload_length_extra_bytes = 1000
shutting_down = False
vector_advertise_queue = queue.Queue()
address_advertise_queue = queue.Queue()
connections = set()
connections_lock = threading.Lock()
hosts = set()
core_nodes = set()
node_pool = set()
unchecked_node_pool = set()
outgoing_connections = 8
connection_limit = 150
objects = {}
objects_lock = threading.Lock()
|
Change User Agent to comply with specification
|
Change User Agent to comply with specification
|
Python
|
mit
|
TheKysek/MiNode,TheKysek/MiNode
|
---
+++
@@ -19,7 +19,7 @@
services = 3 # NODE_NETWORK, NODE_SSL
stream = 1
nonce = os.urandom(8)
-user_agent = b'MiNode-v0.2.0'
+user_agent = b'/MiNode:0.2.1/'
timeout = 600
header_length = 24
|
c0b76d401b305c1bcd2ed5814a89719d4c6a3d83
|
heat_cfnclient/tests/test_cli.py
|
heat_cfnclient/tests/test_cli.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
import heat_cfnclient
import os
import subprocess
basepath = os.path.join(heat_cfnclient.__path__[0], os.path.pardir)
class CliTest(testtools.TestCase):
def test_heat_cfn(self):
self.bin_run('heat-cfn')
def test_heat_boto(self):
self.bin_run('heat-boto')
def test_heat_watch(self):
self.bin_run('heat-watch')
def bin_run(self, bin):
fullpath = basepath + '/bin/' + bin
proc = subprocess.Popen(fullpath,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if proc.returncode:
print('Error executing %s:\n %s %s ' % (bin, stdout, stderr))
raise subprocess.CalledProcessError(proc.returncode, bin)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
import heat_cfnclient
import os
import subprocess
basepath = os.path.join(heat_cfnclient.__path__[0], os.path.pardir)
@testtools.skip
class CliTest(testtools.TestCase):
def test_heat_cfn(self):
self.bin_run('heat-cfn')
def test_heat_boto(self):
self.bin_run('heat-boto')
def test_heat_watch(self):
self.bin_run('heat-watch')
def bin_run(self, bin):
fullpath = basepath + '/bin/' + bin
proc = subprocess.Popen(fullpath,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
if proc.returncode:
print('Error executing %s:\n %s %s ' % (bin, stdout, stderr))
raise subprocess.CalledProcessError(proc.returncode, bin)
|
Disable tests until new repo is stable
|
Disable tests until new repo is stable
Change-Id: Ic6932c1028c72b5600d03ab59102d1c1cff1b36c
|
Python
|
apache-2.0
|
openstack-dev/heat-cfnclient
|
---
+++
@@ -21,6 +21,7 @@
basepath = os.path.join(heat_cfnclient.__path__[0], os.path.pardir)
+@testtools.skip
class CliTest(testtools.TestCase):
def test_heat_cfn(self):
|
82ad6bf164000940e17dcb01b27b22b97c69beba
|
questionnaire/urls.py
|
questionnaire/urls.py
|
# vim: set fileencoding=utf-8
from django.conf.urls.defaults import *
from views import *
urlpatterns = patterns('',
url(r'^$',
questionnaire, name='questionnaire_noargs'),
url(r'^csv/(?P<qid>\d+)/',
export_csv, name='export_csv'),
url(r'^(?P<runcode>[^/]+)/(?P<qs>\d+)/$',
questionnaire, name='questionset'),
url(r'^(?P<runcode>[^/]+)/',
questionnaire, name='questionnaire'),
)
|
# vim: set fileencoding=utf-8
from django.conf.urls.defaults import *
from views import *
urlpatterns = patterns('',
url(r'^$',
questionnaire, name='questionnaire_noargs'),
url(r'^csv/(?P<qid>\d+)/',
export_csv, name='export_csv'),
url(r'^(?P<runcode>[^/]+)/(?P<qs>[-]{0,1}\d+)/$',
questionnaire, name='questionset'),
url(r'^(?P<runcode>[^/]+)/',
questionnaire, name='questionnaire'),
)
|
Enable questionsets with negative sortids
|
Enable questionsets with negative sortids
|
Python
|
bsd-3-clause
|
JanOosting/ed-questionnaire,affan2/ed-questionnaire,seantis/seantis-questionnaire,n3storm/seantis-questionnaire,affan2/ed-questionnaire,daniboy/seantis-questionnaire,eugena/ed-questionnaire,eugena/seantis-questionnaire,JanOosting/ed-questionnaire,eugena/seantis-questionnaire,trantu/seantis-questionnaire,daniboy/seantis-questionnaire,JanOosting/ed-questionnaire,n3storm/seantis-questionnaire,seantis/seantis-questionnaire,eugena/ed-questionnaire,daniboy/seantis-questionnaire,eugena/ed-questionnaire,trantu/seantis-questionnaire,eugena/seantis-questionnaire,seantis/seantis-questionnaire,trantu/seantis-questionnaire,affan2/ed-questionnaire
|
---
+++
@@ -8,7 +8,7 @@
questionnaire, name='questionnaire_noargs'),
url(r'^csv/(?P<qid>\d+)/',
export_csv, name='export_csv'),
- url(r'^(?P<runcode>[^/]+)/(?P<qs>\d+)/$',
+ url(r'^(?P<runcode>[^/]+)/(?P<qs>[-]{0,1}\d+)/$',
questionnaire, name='questionset'),
url(r'^(?P<runcode>[^/]+)/',
questionnaire, name='questionnaire'),
|
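A quick standalone check of what the widened pattern accepts; note that -?\d+ is the usual shorthand for [-]{0,1}\d+.

import re

qs_re = re.compile(r'^(?P<qs>[-]{0,1}\d+)$')

assert qs_re.match('12').group('qs') == '12'
assert qs_re.match('-3').group('qs') == '-3'   # negative sortids now match
assert qs_re.match('--3') is None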
c3bb58fbcbd7c1699571859af736952c36f3029a
|
project/library/urls.py
|
project/library/urls.py
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('library',
url(r'^all$',
view='views.listing',
kwargs={'template':'book_listing.html'},
name='listing'
),
url(r'^library/(?P<id>[-\w]+)/$',
view='views.book',
kwargs={'template':'book.html'},
name='book'
)
)
|
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('library',
url(r'^all$',
view='views.listing',
kwargs={'template':'book_listing.html'},
name='listing'
),
url(r'^book/(?P<id>[-\w]+)/$',
view='views.book',
kwargs={'template':'book.html'},
name='book'
)
)
|
Update url for books to be more semantic
|
Update url for books to be more semantic
|
Python
|
mit
|
DUCSS/ducss-site-old,DUCSS/ducss-site-old,DUCSS/ducss-site-old
|
---
+++
@@ -10,7 +10,7 @@
kwargs={'template':'book_listing.html'},
name='listing'
),
- url(r'^library/(?P<id>[-\w]+)/$',
+ url(r'^book/(?P<id>[-\w]+)/$',
view='views.book',
kwargs={'template':'book.html'},
name='book'
|
a15518111b6d03a4b67a2dbaa759afff15fe3302
|
spec/Report_S52_spec.py
|
spec/Report_S52_spec.py
|
from expects import expect, equal
from primestg.report import Report
with fdescription('Report S52 example'):
with before.all:
self.data_filename = 'spec/data/MRTR000000822522_0_S52_1_20200929001048'
self.report = {}
with open(self.data_filename) as data_file:
self.report = Report(data_file)
with it('generates expected results for a value of the first line of '
'first remote terminal unit'):
expected_first_value = dict(
ae=0.0,
bc='00',
ai=8717.0,
r1=43.0,
r2=0.0,
r3=0.0,
r4=142.0,
timestamp='2020-09-14 01:00:00',
rt_unit_name='MRTR000000822522',
name='MRTL000006609121',
magn=1
)
rt_unit = list(self.report.rt_units)[0]
line = list(rt_unit.lines)[0]
values = line.values
first_value_first_line = {}
for x in values:
if x['timestamp'] == expected_first_value['timestamp']:
first_value_first_line = x
expect(first_value_first_line)\
.to(equal(expected_first_value))
|
from expects import expect, equal
from primestg.report import Report
with description('Report S52 example'):
with before.all:
self.data_filename = 'spec/data/MRTR000000822522_0_S52_1_20200929001048'
self.report = {}
with open(self.data_filename) as data_file:
self.report = Report(data_file)
with it('generates expected results for a value of the first line of '
'first remote terminal unit'):
expected_first_value = dict(
ae=0.0,
bc='00',
ai=8717.0,
r1=43.0,
r2=0.0,
r3=0.0,
r4=142.0,
timestamp='2020-09-14 01:00:00',
rt_unit_name='MRTR000000822522',
name='MRTL000006609121',
magn=1
)
rt_unit = list(self.report.rt_units)[0]
line = list(rt_unit.lines)[0]
values = line.values
first_value_first_line = {}
for x in values:
if x['timestamp'] == expected_first_value['timestamp']:
first_value_first_line = x
expect(first_value_first_line)\
.to(equal(expected_first_value))
|
FIX only pass S52 test
|
FIX only pass S52 test
|
Python
|
agpl-3.0
|
gisce/primestg
|
---
+++
@@ -2,7 +2,7 @@
from primestg.report import Report
-with fdescription('Report S52 example'):
+with description('Report S52 example'):
with before.all:
self.data_filename = 'spec/data/MRTR000000822522_0_S52_1_20200929001048'
|
8d5d45f3a04235a9ee4fd1cadd39cc0010775ac9
|
humbug/ratelimit.py
|
humbug/ratelimit.py
|
import traceback
from hashlib import sha256
from datetime import datetime, timedelta
# Adapted http://djangosnippets.org/snippets/2242/ by user s29 (October 25, 2010)
class _RateLimitFilter(object):
last_error = 0
def filter(self, record):
from django.conf import settings
from django.core.cache import cache
# Track duplicate errors
duplicate = False
rate = getattr(settings, '%s_LIMIT' % self.__class__.__name__.upper(),
600) # seconds
if rate > 0:
# Test if the cache works
try:
cache.set('RLF_TEST_KEY', 1, 1)
use_cache = cache.get('RLF_TEST_KEY') == 1
except:
use_cache = False
if use_cache:
key = self.__class__.__name__.upper()
duplicate = cache.get(key) == 1
cache.set(key, 1, rate)
else:
min_date = datetime.now() - timedelta(seconds=rate)
duplicate = (self.last_error >= min_date)
if not duplicate:
self.last_error = datetime.now()
return not duplicate
class HumbugLimiter(_RateLimitFilter):
pass
class EmailLimiter(_RateLimitFilter):
pass
|
import traceback
from hashlib import sha256
from datetime import datetime, timedelta
# Adapted http://djangosnippets.org/snippets/2242/ by user s29 (October 25, 2010)
class _RateLimitFilter(object):
last_error = datetime.min
def filter(self, record):
from django.conf import settings
from django.core.cache import cache
# Track duplicate errors
duplicate = False
rate = getattr(settings, '%s_LIMIT' % self.__class__.__name__.upper(),
600) # seconds
if rate > 0:
# Test if the cache works
try:
cache.set('RLF_TEST_KEY', 1, 1)
use_cache = cache.get('RLF_TEST_KEY') == 1
except:
use_cache = False
if use_cache:
key = self.__class__.__name__.upper()
duplicate = cache.get(key) == 1
cache.set(key, 1, rate)
else:
min_date = datetime.now() - timedelta(seconds=rate)
duplicate = (self.last_error >= min_date)
if not duplicate:
self.last_error = datetime.now()
return not duplicate
class HumbugLimiter(_RateLimitFilter):
pass
class EmailLimiter(_RateLimitFilter):
pass
|
Use datetime.min for initial last_error rather than int 0.
|
Use datetime.min for initial last_error rather than int 0.
Otherwise, code may break when it encounters a comparison against
last_error.
(imported from commit 301f256fba065ae9704b1d7f6e91e69ec54f1aa1)
|
Python
|
apache-2.0
|
levixie/zulip,zwily/zulip,reyha/zulip,rht/zulip,jrowan/zulip,praveenaki/zulip,esander91/zulip,jeffcao/zulip,Juanvulcano/zulip,zachallaun/zulip,Batterfii/zulip,KingxBanana/zulip,krtkmj/zulip,zorojean/zulip,christi3k/zulip,easyfmxu/zulip,arpitpanwar/zulip,glovebx/zulip,yuvipanda/zulip,ashwinirudrappa/zulip,johnnygaddarr/zulip,jackrzhang/zulip,yuvipanda/zulip,jerryge/zulip,bastianh/zulip,zachallaun/zulip,hengqujushi/zulip,JPJPJPOPOP/zulip,dattatreya303/zulip,babbage/zulip,JanzTam/zulip,fw1121/zulip,sup95/zulip,udxxabp/zulip,so0k/zulip,JanzTam/zulip,zhaoweigg/zulip,technicalpickles/zulip,moria/zulip,eeshangarg/zulip,moria/zulip,wangdeshui/zulip,hackerkid/zulip,bssrdf/zulip,ryanbackman/zulip,bitemyapp/zulip,Cheppers/zulip,gkotian/zulip,punchagan/zulip,tdr130/zulip,joshisa/zulip,synicalsyntax/zulip,j831/zulip,RobotCaleb/zulip,amanharitsh123/zulip,dawran6/zulip,lfranchi/zulip,amyliu345/zulip,Cheppers/zulip,aakash-cr7/zulip,tbutter/zulip,dattatreya303/zulip,jonesgithub/zulip,LAndreas/zulip,thomasboyt/zulip,MayB/zulip,kokoar/zulip,jerryge/zulip,wangdeshui/zulip,qq1012803704/zulip,themass/zulip,Vallher/zulip,calvinleenyc/zulip,zwily/zulip,vabs22/zulip,j831/zulip,hj3938/zulip,ufosky-server/zulip,gigawhitlocks/zulip,dnmfarrell/zulip,SmartPeople/zulip,itnihao/zulip,noroot/zulip,ikasumiwt/zulip,codeKonami/zulip,zorojean/zulip,adnanh/zulip,jeffcao/zulip,brainwane/zulip,sup95/zulip,ericzhou2008/zulip,dwrpayne/zulip,zacps/zulip,johnnygaddarr/zulip,shaunstanislaus/zulip,schatt/zulip,swinghu/zulip,reyha/zulip,tiansiyuan/zulip,Batterfii/zulip,so0k/zulip,brockwhittaker/zulip,firstblade/zulip,proliming/zulip,themass/zulip,vakila/zulip,calvinleenyc/zulip,developerfm/zulip,codeKonami/zulip,shubhamdhama/zulip,mahim97/zulip,hafeez3000/zulip,itnihao/zulip,adnanh/zulip,voidException/zulip,AZtheAsian/zulip,guiquanz/zulip,umkay/zulip,susansls/zulip,fw1121/zulip,ericzhou2008/zulip,dwrpayne/zulip,timabbott/zulip,dhcrzf/zulip,he15his/zulip,fw1121/zulip,timabbott/zulip,KJin99/zulip,adnanh/zulip,Galexrt/zulip,verma-varsha/zulip,wangdeshui/zulip,kaiyuanheshang/zulip,wdaher/zulip,luyifan/zulip,dxq-git/zulip,natanovia/zulip,glovebx/zulip,armooo/zulip,PaulPetring/zulip,easyfmxu/zulip,MayB/zulip,joshisa/zulip,yocome/zulip,he15his/zulip,hackerkid/zulip,natanovia/zulip,adnanh/zulip,Diptanshu8/zulip,xuxiao/zulip,jackrzhang/zulip,johnnygaddarr/zulip,brainwane/zulip,souravbadami/zulip,bssrdf/zulip,Galexrt/zulip,aakash-cr7/zulip,dotcool/zulip,easyfmxu/zulip,babbage/zulip,KJin99/zulip,fw1121/zulip,Batterfii/zulip,eastlhu/zulip,pradiptad/zulip,KingxBanana/zulip,xuanhan863/zulip,umkay/zulip,AZtheAsian/zulip,tdr130/zulip,suxinde2009/zulip,zorojean/zulip,zacps/zulip,zhaoweigg/zulip,paxapy/zulip,andersk/zulip,seapasulli/zulip,voidException/zulip,jackrzhang/zulip,bowlofstew/zulip,paxapy/zulip,Suninus/zulip,showell/zulip,udxxabp/zulip,Juanvulcano/zulip,luyifan/zulip,voidException/zulip,hafeez3000/zulip,arpitpanwar/zulip,hafeez3000/zulip,ufosky-server/zulip,MariaFaBella85/zulip,babbage/zulip,codeKonami/zulip,firstblade/zulip,ryanbackman/zulip,eeshangarg/zulip,dwrpayne/zulip,christi3k/zulip,bluesea/zulip,pradiptad/zulip,niftynei/zulip,ikasumiwt/zulip,wavelets/zulip,JanzTam/zulip,zwily/zulip,he15his/zulip,praveenaki/zulip,alliejones/zulip,peiwei/zulip,amallia/zulip,verma-varsha/zulip,DazWorrall/zulip,jerryge/zulip,calvinleenyc/zulip,ericzhou2008/zulip,codeKonami/zulip,luyifan/zulip,kou/zulip,arpitpanwar/zulip,andersk/zulip,KingxBanana/zulip,nicholasbs/zulip,technicalpickles/zulip,xuxiao/zulip,nicholasbs/zulip,jonesgithub/zulip,zhaoweigg/zulip,aku
seru/zulip,andersk/zulip,littledogboy/zulip,eeshangarg/zulip,bitemyapp/zulip,xuxiao/zulip,esander91/zulip,ipernet/zulip,shaunstanislaus/zulip,Drooids/zulip,JanzTam/zulip,Frouk/zulip,mdavid/zulip,tiansiyuan/zulip,jimmy54/zulip,luyifan/zulip,jerryge/zulip,babbage/zulip,bastianh/zulip,hj3938/zulip,proliming/zulip,glovebx/zulip,karamcnair/zulip,PhilSk/zulip,xuanhan863/zulip,vaidap/zulip,Diptanshu8/zulip,sonali0901/zulip,synicalsyntax/zulip,tbutter/zulip,mohsenSy/zulip,LeeRisk/zulip,dnmfarrell/zulip,itnihao/zulip,souravbadami/zulip,hengqujushi/zulip,ipernet/zulip,LeeRisk/zulip,schatt/zulip,jphilipsen05/zulip,Batterfii/zulip,seapasulli/zulip,showell/zulip,jerryge/zulip,ufosky-server/zulip,bssrdf/zulip,umkay/zulip,proliming/zulip,moria/zulip,bastianh/zulip,Jianchun1/zulip,sharmaeklavya2/zulip,ApsOps/zulip,voidException/zulip,timabbott/zulip,saitodisse/zulip,AZtheAsian/zulip,saitodisse/zulip,ashwinirudrappa/zulip,tbutter/zulip,isht3/zulip,Diptanshu8/zulip,Juanvulcano/zulip,luyifan/zulip,wavelets/zulip,zulip/zulip,kou/zulip,vakila/zulip,joyhchen/zulip,developerfm/zulip,vabs22/zulip,j831/zulip,cosmicAsymmetry/zulip,itnihao/zulip,akuseru/zulip,jrowan/zulip,johnny9/zulip,blaze225/zulip,bowlofstew/zulip,shrikrishnaholla/zulip,amanharitsh123/zulip,amallia/zulip,avastu/zulip,lfranchi/zulip,deer-hope/zulip,hackerkid/zulip,zorojean/zulip,yocome/zulip,vabs22/zulip,JPJPJPOPOP/zulip,andersk/zulip,themass/zulip,krtkmj/zulip,mansilladev/zulip,jrowan/zulip,deer-hope/zulip,susansls/zulip,zacps/zulip,arpith/zulip,mohsenSy/zulip,vikas-parashar/zulip,gigawhitlocks/zulip,AZtheAsian/zulip,nicholasbs/zulip,hengqujushi/zulip,ahmadassaf/zulip,pradiptad/zulip,Gabriel0402/zulip,reyha/zulip,ryansnowboarder/zulip,codeKonami/zulip,ashwinirudrappa/zulip,zorojean/zulip,natanovia/zulip,jeffcao/zulip,jphilipsen05/zulip,Gabriel0402/zulip,yocome/zulip,karamcnair/zulip,Vallher/zulip,Gabriel0402/zulip,easyfmxu/zulip,dawran6/zulip,ikasumiwt/zulip,shrikrishnaholla/zulip,ericzhou2008/zulip,gkotian/zulip,akuseru/zulip,voidException/zulip,suxinde2009/zulip,Juanvulcano/zulip,mahim97/zulip,littledogboy/zulip,Gabriel0402/zulip,shaunstanislaus/zulip,schatt/zulip,saitodisse/zulip,xuanhan863/zulip,zacps/zulip,PhilSk/zulip,ahmadassaf/zulip,developerfm/zulip,niftynei/zulip,he15his/zulip,peguin40/zulip,samatdav/zulip,esander91/zulip,willingc/zulip,rht/zulip,natanovia/zulip,Batterfii/zulip,mahim97/zulip,Qgap/zulip,schatt/zulip,mdavid/zulip,shubhamdhama/zulip,dnmfarrell/zulip,ericzhou2008/zulip,zwily/zulip,swinghu/zulip,arpith/zulip,willingc/zulip,christi3k/zulip,shaunstanislaus/zulip,qq1012803704/zulip,DazWorrall/zulip,he15his/zulip,natanovia/zulip,zwily/zulip,hj3938/zulip,yuvipanda/zulip,Galexrt/zulip,arpith/zulip,sharmaeklavya2/zulip,PaulPetring/zulip,tdr130/zulip,moria/zulip,wangdeshui/zulip,amallia/zulip,Drooids/zulip,dnmfarrell/zulip,pradiptad/zulip,MariaFaBella85/zulip,hayderimran7/zulip,jrowan/zulip,blaze225/zulip,grave-w-grave/zulip,timabbott/zulip,jerryge/zulip,paxapy/zulip,Frouk/zulip,joshisa/zulip,Batterfii/zulip,johnny9/zulip,zwily/zulip,pradiptad/zulip,dxq-git/zulip,thomasboyt/zulip,wweiradio/zulip,wweiradio/zulip,qq1012803704/zulip,amanharitsh123/zulip,shubhamdhama/zulip,hafeez3000/zulip,udxxabp/zulip,xuanhan863/zulip,jessedhillon/zulip,hackerkid/zulip,mohsenSy/zulip,huangkebo/zulip,udxxabp/zulip,hustlzp/zulip,kokoar/zulip,souravbadami/zulip,moria/zulip,developerfm/zulip,MariaFaBella85/zulip,nicholasbs/zulip,Galexrt/zulip,niftynei/zulip,tdr130/zulip,bastianh/zulip,developerfm/zulip,vakila/zulip,jainayush975/zulip,ApsOps/zulip,SmartPeop
le/zulip,brainwane/zulip,sup95/zulip,thomasboyt/zulip,hafeez3000/zulip,seapasulli/zulip,brainwane/zulip,Frouk/zulip,shubhamdhama/zulip,dxq-git/zulip,amyliu345/zulip,deer-hope/zulip,reyha/zulip,susansls/zulip,johnny9/zulip,levixie/zulip,isht3/zulip,atomic-labs/zulip,glovebx/zulip,wweiradio/zulip,Drooids/zulip,aliceriot/zulip,dnmfarrell/zulip,eastlhu/zulip,kaiyuanheshang/zulip,developerfm/zulip,gigawhitlocks/zulip,brockwhittaker/zulip,hackerkid/zulip,Suninus/zulip,punchagan/zulip,levixie/zulip,armooo/zulip,he15his/zulip,hayderimran7/zulip,hustlzp/zulip,yocome/zulip,jphilipsen05/zulip,JPJPJPOPOP/zulip,SmartPeople/zulip,johnnygaddarr/zulip,ryanbackman/zulip,lfranchi/zulip,avastu/zulip,pradiptad/zulip,verma-varsha/zulip,hayderimran7/zulip,karamcnair/zulip,vakila/zulip,sonali0901/zulip,qq1012803704/zulip,stamhe/zulip,bitemyapp/zulip,zacps/zulip,gkotian/zulip,Vallher/zulip,hafeez3000/zulip,willingc/zulip,adnanh/zulip,atomic-labs/zulip,EasonYi/zulip,thomasboyt/zulip,dwrpayne/zulip,itnihao/zulip,kokoar/zulip,Frouk/zulip,jimmy54/zulip,jimmy54/zulip,amanharitsh123/zulip,easyfmxu/zulip,zofuthan/zulip,zofuthan/zulip,Cheppers/zulip,mohsenSy/zulip,noroot/zulip,hayderimran7/zulip,yuvipanda/zulip,amallia/zulip,vikas-parashar/zulip,mansilladev/zulip,aps-sids/zulip,KJin99/zulip,Gabriel0402/zulip,littledogboy/zulip,showell/zulip,zachallaun/zulip,aakash-cr7/zulip,peiwei/zulip,m1ssou/zulip,TigorC/zulip,hustlzp/zulip,LeeRisk/zulip,shaunstanislaus/zulip,vaidap/zulip,aakash-cr7/zulip,aliceriot/zulip,cosmicAsymmetry/zulip,joshisa/zulip,ipernet/zulip,LeeRisk/zulip,hj3938/zulip,m1ssou/zulip,sharmaeklavya2/zulip,LAndreas/zulip,dattatreya303/zulip,SmartPeople/zulip,rht/zulip,tdr130/zulip,praveenaki/zulip,blaze225/zulip,karamcnair/zulip,zulip/zulip,jainayush975/zulip,Jianchun1/zulip,Diptanshu8/zulip,zorojean/zulip,tommyip/zulip,bssrdf/zulip,eastlhu/zulip,esander91/zulip,deer-hope/zulip,zhaoweigg/zulip,TigorC/zulip,themass/zulip,Juanvulcano/zulip,aliceriot/zulip,JPJPJPOPOP/zulip,ahmadassaf/zulip,eastlhu/zulip,jimmy54/zulip,dotcool/zulip,kaiyuanheshang/zulip,Jianchun1/zulip,susansls/zulip,bluesea/zulip,swinghu/zulip,Vallher/zulip,shaunstanislaus/zulip,hengqujushi/zulip,christi3k/zulip,peiwei/zulip,littledogboy/zulip,vikas-parashar/zulip,Suninus/zulip,wdaher/zulip,amanharitsh123/zulip,fw1121/zulip,hustlzp/zulip,adnanh/zulip,ahmadassaf/zulip,wavelets/zulip,tommyip/zulip,johnny9/zulip,xuxiao/zulip,Diptanshu8/zulip,avastu/zulip,jessedhillon/zulip,m1ssou/zulip,punchagan/zulip,rht/zulip,wangdeshui/zulip,timabbott/zulip,aliceriot/zulip,jphilipsen05/zulip,sup95/zulip,noroot/zulip,ryansnowboarder/zulip,Jianchun1/zulip,synicalsyntax/zulip,willingc/zulip,wdaher/zulip,nicholasbs/zulip,guiquanz/zulip,Vallher/zulip,kou/zulip,grave-w-grave/zulip,avastu/zulip,souravbadami/zulip,dxq-git/zulip,Batterfii/zulip,noroot/zulip,m1ssou/zulip,saitodisse/zulip,voidException/zulip,vaidap/zulip,noroot/zulip,aakash-cr7/zulip,Drooids/zulip,jainayush975/zulip,bluesea/zulip,jessedhillon/zulip,RobotCaleb/zulip,jackrzhang/zulip,xuanhan863/zulip,gkotian/zulip,DazWorrall/zulip,armooo/zulip,easyfmxu/zulip,peguin40/zulip,praveenaki/zulip,LAndreas/zulip,suxinde2009/zulip,huangkebo/zulip,wavelets/zulip,Jianchun1/zulip,akuseru/zulip,ryansnowboarder/zulip,hackerkid/zulip,tiansiyuan/zulip,PaulPetring/zulip,jonesgithub/zulip,he15his/zulip,MariaFaBella85/zulip,dotcool/zulip,hackerkid/zulip,gigawhitlocks/zulip,bowlofstew/zulip,KJin99/zulip,ikasumiwt/zulip,zulip/zulip,vaidap/zulip,grave-w-grave/zulip,PhilSk/zulip,amyliu345/zulip,ikasumiwt/zulip,bssrdf/zulip,thomasboyt/z
ulip,arpith/zulip,praveenaki/zulip,bluesea/zulip,babbage/zulip,bluesea/zulip,guiquanz/zulip,vakila/zulip,ApsOps/zulip,amyliu345/zulip,thomasboyt/zulip,PaulPetring/zulip,ryansnowboarder/zulip,dhcrzf/zulip,hafeez3000/zulip,Suninus/zulip,eeshangarg/zulip,MariaFaBella85/zulip,jeffcao/zulip,Cheppers/zulip,alliejones/zulip,jeffcao/zulip,avastu/zulip,suxinde2009/zulip,so0k/zulip,aliceriot/zulip,dhcrzf/zulip,arpitpanwar/zulip,udxxabp/zulip,TigorC/zulip,niftynei/zulip,lfranchi/zulip,arpith/zulip,tommyip/zulip,KingxBanana/zulip,hj3938/zulip,tommyip/zulip,cosmicAsymmetry/zulip,bssrdf/zulip,bastianh/zulip,xuxiao/zulip,sharmaeklavya2/zulip,PhilSk/zulip,Cheppers/zulip,eastlhu/zulip,jphilipsen05/zulip,gkotian/zulip,akuseru/zulip,LAndreas/zulip,jimmy54/zulip,EasonYi/zulip,rishig/zulip,umkay/zulip,deer-hope/zulip,ApsOps/zulip,suxinde2009/zulip,ipernet/zulip,Gabriel0402/zulip,bitemyapp/zulip,aps-sids/zulip,tdr130/zulip,easyfmxu/zulip,eeshangarg/zulip,cosmicAsymmetry/zulip,gigawhitlocks/zulip,dhcrzf/zulip,kou/zulip,jimmy54/zulip,dotcool/zulip,aliceriot/zulip,dxq-git/zulip,wangdeshui/zulip,peiwei/zulip,sup95/zulip,brockwhittaker/zulip,shubhamdhama/zulip,isht3/zulip,glovebx/zulip,vaidap/zulip,kou/zulip,zulip/zulip,zhaoweigg/zulip,tbutter/zulip,tiansiyuan/zulip,xuanhan863/zulip,grave-w-grave/zulip,TigorC/zulip,zhaoweigg/zulip,bluesea/zulip,ryanbackman/zulip,aps-sids/zulip,rishig/zulip,kokoar/zulip,dawran6/zulip,EasonYi/zulip,dattatreya303/zulip,Qgap/zulip,j831/zulip,bitemyapp/zulip,RobotCaleb/zulip,Qgap/zulip,jphilipsen05/zulip,proliming/zulip,xuxiao/zulip,Cheppers/zulip,ryansnowboarder/zulip,rishig/zulip,willingc/zulip,bastianh/zulip,reyha/zulip,MayB/zulip,ashwinirudrappa/zulip,vikas-parashar/zulip,reyha/zulip,DazWorrall/zulip,peguin40/zulip,zorojean/zulip,jessedhillon/zulip,ApsOps/zulip,KJin99/zulip,developerfm/zulip,kou/zulip,KingxBanana/zulip,gigawhitlocks/zulip,yuvipanda/zulip,arpitpanwar/zulip,firstblade/zulip,isht3/zulip,DazWorrall/zulip,hustlzp/zulip,proliming/zulip,proliming/zulip,Frouk/zulip,babbage/zulip,karamcnair/zulip,fw1121/zulip,lfranchi/zulip,mdavid/zulip,ryanbackman/zulip,JanzTam/zulip,mansilladev/zulip,zulip/zulip,ashwinirudrappa/zulip,m1ssou/zulip,susansls/zulip,dnmfarrell/zulip,zhaoweigg/zulip,andersk/zulip,gigawhitlocks/zulip,KJin99/zulip,peiwei/zulip,armooo/zulip,voidException/zulip,hayderimran7/zulip,wavelets/zulip,vabs22/zulip,peguin40/zulip,dhcrzf/zulip,bowlofstew/zulip,joshisa/zulip,dawran6/zulip,EasonYi/zulip,levixie/zulip,ericzhou2008/zulip,shubhamdhama/zulip,verma-varsha/zulip,LeeRisk/zulip,calvinleenyc/zulip,wweiradio/zulip,peguin40/zulip,bastianh/zulip,themass/zulip,ipernet/zulip,glovebx/zulip,bowlofstew/zulip,DazWorrall/zulip,saitodisse/zulip,zacps/zulip,ufosky-server/zulip,krtkmj/zulip,samatdav/zulip,rht/zulip,kaiyuanheshang/zulip,EasonYi/zulip,dwrpayne/zulip,tdr130/zulip,aps-sids/zulip,tbutter/zulip,joyhchen/zulip,punchagan/zulip,Suninus/zulip,tbutter/zulip,synicalsyntax/zulip,bssrdf/zulip,brainwane/zulip,swinghu/zulip,wdaher/zulip,so0k/zulip,guiquanz/zulip,mahim97/zulip,dattatreya303/zulip,ahmadassaf/zulip,ashwinirudrappa/zulip,arpith/zulip,RobotCaleb/zulip,zofuthan/zulip,atomic-labs/zulip,luyifan/zulip,dotcool/zulip,suxinde2009/zulip,paxapy/zulip,krtkmj/zulip,jainayush975/zulip,RobotCaleb/zulip,deer-hope/zulip,Gabriel0402/zulip,PhilSk/zulip,vabs22/zulip,levixie/zulip,sonali0901/zulip,dotcool/zulip,j831/zulip,schatt/zulip,dhcrzf/zulip,zachallaun/zulip,christi3k/zulip,yuvipanda/zulip,shubhamdhama/zulip,JPJPJPOPOP/zulip,Drooids/zulip,schatt/zulip,saitodisse/zulip,shrikrishnaholl
a/zulip,PaulPetring/zulip,wdaher/zulip,EasonYi/zulip,ipernet/zulip,technicalpickles/zulip,shrikrishnaholla/zulip,yocome/zulip,firstblade/zulip,rishig/zulip,ApsOps/zulip,joyhchen/zulip,paxapy/zulip,armooo/zulip,ashwinirudrappa/zulip,tommyip/zulip,ikasumiwt/zulip,atomic-labs/zulip,blaze225/zulip,esander91/zulip,Suninus/zulip,synicalsyntax/zulip,amanharitsh123/zulip,samatdav/zulip,adnanh/zulip,amallia/zulip,stamhe/zulip,qq1012803704/zulip,hengqujushi/zulip,qq1012803704/zulip,mohsenSy/zulip,joshisa/zulip,samatdav/zulip,PaulPetring/zulip,ApsOps/zulip,kaiyuanheshang/zulip,peguin40/zulip,amyliu345/zulip,Suninus/zulip,jrowan/zulip,hustlzp/zulip,codeKonami/zulip,armooo/zulip,hengqujushi/zulip,brockwhittaker/zulip,vabs22/zulip,joyhchen/zulip,swinghu/zulip,bitemyapp/zulip,technicalpickles/zulip,glovebx/zulip,AZtheAsian/zulip,johnny9/zulip,joshisa/zulip,bitemyapp/zulip,krtkmj/zulip,JanzTam/zulip,jackrzhang/zulip,vikas-parashar/zulip,calvinleenyc/zulip,johnnygaddarr/zulip,vakila/zulip,hustlzp/zulip,wweiradio/zulip,codeKonami/zulip,zachallaun/zulip,willingc/zulip,babbage/zulip,KingxBanana/zulip,johnnygaddarr/zulip,showell/zulip,schatt/zulip,dwrpayne/zulip,MayB/zulip,tiansiyuan/zulip,littledogboy/zulip,mdavid/zulip,andersk/zulip,shrikrishnaholla/zulip,jimmy54/zulip,seapasulli/zulip,JanzTam/zulip,LAndreas/zulip,levixie/zulip,yocome/zulip,wangdeshui/zulip,shrikrishnaholla/zulip,huangkebo/zulip,cosmicAsymmetry/zulip,grave-w-grave/zulip,LeeRisk/zulip,grave-w-grave/zulip,amyliu345/zulip,stamhe/zulip,yuvipanda/zulip,jeffcao/zulip,wdaher/zulip,shrikrishnaholla/zulip,brockwhittaker/zulip,jonesgithub/zulip,zachallaun/zulip,sup95/zulip,avastu/zulip,tbutter/zulip,vakila/zulip,jonesgithub/zulip,AZtheAsian/zulip,itnihao/zulip,krtkmj/zulip,moria/zulip,Galexrt/zulip,verma-varsha/zulip,Diptanshu8/zulip,luyifan/zulip,m1ssou/zulip,paxapy/zulip,dxq-git/zulip,yocome/zulip,jackrzhang/zulip,kou/zulip,aps-sids/zulip,j831/zulip,saitodisse/zulip,ahmadassaf/zulip,aliceriot/zulip,huangkebo/zulip,LAndreas/zulip,udxxabp/zulip,nicholasbs/zulip,sharmaeklavya2/zulip,xuxiao/zulip,umkay/zulip,mdavid/zulip,lfranchi/zulip,brockwhittaker/zulip,SmartPeople/zulip,Jianchun1/zulip,sharmaeklavya2/zulip,rishig/zulip,wweiradio/zulip,Juanvulcano/zulip,jainayush975/zulip,tommyip/zulip,JPJPJPOPOP/zulip,firstblade/zulip,pradiptad/zulip,alliejones/zulip,souravbadami/zulip,praveenaki/zulip,natanovia/zulip,fw1121/zulip,RobotCaleb/zulip,atomic-labs/zulip,huangkebo/zulip,sonali0901/zulip,rht/zulip,umkay/zulip,dhcrzf/zulip,alliejones/zulip,wweiradio/zulip,vaidap/zulip,ipernet/zulip,levixie/zulip,jessedhillon/zulip,eeshangarg/zulip,Vallher/zulip,cosmicAsymmetry/zulip,tiansiyuan/zulip,suxinde2009/zulip,dawran6/zulip,alliejones/zulip,esander91/zulip,sonali0901/zulip,johnny9/zulip,bowlofstew/zulip,dwrpayne/zulip,ufosky-server/zulip,akuseru/zulip,MariaFaBella85/zulip,Vallher/zulip,zofuthan/zulip,showell/zulip,tiansiyuan/zulip,aps-sids/zulip,arpitpanwar/zulip,qq1012803704/zulip,ufosky-server/zulip,verma-varsha/zulip,so0k/zulip,KJin99/zulip,littledogboy/zulip,ikasumiwt/zulip,hengqujushi/zulip,samatdav/zulip,hj3938/zulip,m1ssou/zulip,kaiyuanheshang/zulip,andersk/zulip,kokoar/zulip,vikas-parashar/zulip,aakash-cr7/zulip,blaze225/zulip,Drooids/zulip,dattatreya303/zulip,rishig/zulip,TigorC/zulip,MayB/zulip,timabbott/zulip,jackrzhang/zulip,karamcnair/zulip,ryansnowboarder/zulip,karamcnair/zulip,wavelets/zulip,technicalpickles/zulip,susansls/zulip,huangkebo/zulip,zofuthan/zulip,swinghu/zulip,littledogboy/zulip,mohsenSy/zulip,Qgap/zulip,deer-hope/zulip,mahim97/zulip,tommyi
p/zulip,mdavid/zulip,udxxabp/zulip,gkotian/zulip,LeeRisk/zulip,Cheppers/zulip,noroot/zulip,MayB/zulip,swinghu/zulip,punchagan/zulip,armooo/zulip,punchagan/zulip,punchagan/zulip,jrowan/zulip,zwily/zulip,Galexrt/zulip,brainwane/zulip,Galexrt/zulip,Qgap/zulip,Qgap/zulip,lfranchi/zulip,showell/zulip,praveenaki/zulip,Frouk/zulip,eastlhu/zulip,dotcool/zulip,itnihao/zulip,arpitpanwar/zulip,technicalpickles/zulip,so0k/zulip,kokoar/zulip,joyhchen/zulip,dxq-git/zulip,natanovia/zulip,EasonYi/zulip,blaze225/zulip,stamhe/zulip,firstblade/zulip,ahmadassaf/zulip,samatdav/zulip,DazWorrall/zulip,avastu/zulip,mansilladev/zulip,alliejones/zulip,jessedhillon/zulip,zofuthan/zulip,jainayush975/zulip,RobotCaleb/zulip,mansilladev/zulip,ryanbackman/zulip,brainwane/zulip,mansilladev/zulip,TigorC/zulip,calvinleenyc/zulip,LAndreas/zulip,ericzhou2008/zulip,atomic-labs/zulip,timabbott/zulip,shaunstanislaus/zulip,nicholasbs/zulip,dnmfarrell/zulip,hayderimran7/zulip,isht3/zulip,guiquanz/zulip,moria/zulip,ryansnowboarder/zulip,aps-sids/zulip,atomic-labs/zulip,so0k/zulip,SmartPeople/zulip,jeffcao/zulip,seapasulli/zulip,Qgap/zulip,isht3/zulip,eeshangarg/zulip,jessedhillon/zulip,noroot/zulip,seapasulli/zulip,MayB/zulip,souravbadami/zulip,joyhchen/zulip,firstblade/zulip,Drooids/zulip,huangkebo/zulip,synicalsyntax/zulip,amallia/zulip,sonali0901/zulip,bowlofstew/zulip,proliming/zulip,zulip/zulip,gkotian/zulip,xuanhan863/zulip,showell/zulip,amallia/zulip,rht/zulip,eastlhu/zulip,stamhe/zulip,mansilladev/zulip,mahim97/zulip,niftynei/zulip,alliejones/zulip,wavelets/zulip,christi3k/zulip,wdaher/zulip,zulip/zulip,thomasboyt/zulip,willingc/zulip,umkay/zulip,esander91/zulip,zofuthan/zulip,mdavid/zulip,stamhe/zulip,themass/zulip,PaulPetring/zulip,rishig/zulip,Frouk/zulip,MariaFaBella85/zulip,peiwei/zulip,PhilSk/zulip,zachallaun/zulip,ufosky-server/zulip,seapasulli/zulip,bluesea/zulip,jonesgithub/zulip,akuseru/zulip,krtkmj/zulip,kaiyuanheshang/zulip,stamhe/zulip,themass/zulip,johnnygaddarr/zulip,hayderimran7/zulip,technicalpickles/zulip,jonesgithub/zulip,peiwei/zulip,guiquanz/zulip,dawran6/zulip,hj3938/zulip,kokoar/zulip,synicalsyntax/zulip,niftynei/zulip,guiquanz/zulip,jerryge/zulip,johnny9/zulip
|
---
+++
@@ -5,7 +5,7 @@
# Adapted http://djangosnippets.org/snippets/2242/ by user s29 (October 25, 2010)
class _RateLimitFilter(object):
- last_error = 0
+ last_error = datetime.min
def filter(self, record):
from django.conf import settings
|
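The diff above swaps the integer sentinel 0 for datetime.min so that last_error can be compared directly against datetime values when deciding whether to suppress a repeated log record. The snippet below is a minimal, self-contained sketch of that throttling pattern, not Zulip's actual _RateLimitFilter; the 600-second interval and the class name are illustrative assumptions.

import logging
from datetime import datetime, timedelta

class ThrottleFilter(logging.Filter):
    """Drop records that arrive within `interval` of the previous one."""
    interval = timedelta(seconds=600)
    last_error = datetime.min  # datetime sentinel, directly comparable with now()

    def filter(self, record):
        now = datetime.now()
        if now - self.last_error < self.interval:
            return False  # suppress this record
        type(self).last_error = now
        return True       # let the record through

handler = logging.StreamHandler()
handler.addFilter(ThrottleFilter())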
79c6c71ab6edd8313fd6c9c6441d69ad04d50721
|
update-database/stackdoc/namespaces/microsoftkb.py
|
update-database/stackdoc/namespaces/microsoftkb.py
|
import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://support.microsoft.com/":
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http://support\.microsoft\.com/(?:default.aspx/)?kb/(\w+)", url)
if m:
ids.append(m.group(1))
return ids
def get_tags():
return None # There isn't a reliable set of tags to filter by. Null indicates that we're not filtering
|
import re
import urllib
############### Functions called by stackdoc
def get_version():
return 1
def get_ids(title, body, tags):
ids = []
if "http://support.microsoft.com/":
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
m = re.match("http://support\.microsoft\.com/(?:default\.aspx/)?[kK][bB]/(\w+)", url)
if m:
ids.append(m.group(1))
m2 = re.match("http://support\.microsoft\.com/(?:default\.aspx)?\?scid=[kK][bB];[-\w]+;(\w+)", url)
if m2:
ids.append(m2.group(1))
return ids
def get_tags():
return None # There isn't a reliable set of tags to filter by. Null indicates that we're not filtering
|
Support another form of KB URL.
|
Support another form of KB URL.
|
Python
|
bsd-3-clause
|
alnorth/stackdoc,alnorth/stackdoc,alnorth/stackdoc
|
---
+++
@@ -12,9 +12,12 @@
if "http://support.microsoft.com/":
urls = re.findall(r'<a href="([^"]+)"', body)
for url in urls:
- m = re.match("http://support\.microsoft\.com/(?:default.aspx/)?kb/(\w+)", url)
+ m = re.match("http://support\.microsoft\.com/(?:default\.aspx/)?[kK][bB]/(\w+)", url)
if m:
ids.append(m.group(1))
+ m2 = re.match("http://support\.microsoft\.com/(?:default\.aspx)?\?scid=[kK][bB];[-\w]+;(\w+)", url)
+ if m2:
+ ids.append(m2.group(1))
return ids
|
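The updated regexes above accept the kb/KB path segment case-insensitively and add a second pattern for the older ?scid=kb;en-us;NNNNNN style of support URL. One thing the commit leaves untouched: `if "http://support.microsoft.com/":` tests a non-empty string literal and is therefore always true; presumably `in body` was intended. A small sketch exercising both patterns (the example URLs are made up for illustration):

import re

KB_PATTERNS = [
    r"http://support\.microsoft\.com/(?:default\.aspx/)?[kK][bB]/(\w+)",
    r"http://support\.microsoft\.com/(?:default\.aspx)?\?scid=[kK][bB];[-\w]+;(\w+)",
]

def extract_kb_ids(urls):
    ids = []
    for url in urls:
        for pattern in KB_PATTERNS:
            m = re.match(pattern, url)
            if m:
                ids.append(m.group(1))
    return ids

print(extract_kb_ids([
    "http://support.microsoft.com/kb/123456",
    "http://support.microsoft.com/default.aspx?scid=kb;en-us;654321",
]))  # -> ['123456', '654321']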
640ad3ed45eef21f2b7a71b4fd73a469ebed4b44
|
reobject/models/fields.py
|
reobject/models/fields.py
|
import attr
def Field(*args, default=attr.NOTHING, **kwargs):
if callable(default):
default = attr.Factory(default)
return attr.ib(*args, default=default, **kwargs)
def ManyToManyField(cls, *args, **kwargs):
metadata = {
'related': {
'target': cls,
'type': 'ManyToMany',
}
}
return attr.ib(*args, **kwargs, metadata=metadata)
|
import attr
def Field(*args, default=attr.NOTHING, **kwargs):
if callable(default):
default = attr.Factory(default)
return attr.ib(*args, default=default, **kwargs)
def ManyToManyField(cls, *args, **kwargs):
metadata = {
'related': {
'target': cls,
'type': 'ManyToMany',
}
}
return attr.ib(*args, metadata=metadata, **kwargs)
|
Fix tests on Python 3.3 and 3.4
|
Fix tests on Python 3.3 and 3.4
|
Python
|
apache-2.0
|
onyb/reobject,onyb/reobject
|
---
+++
@@ -16,4 +16,4 @@
}
}
- return attr.ib(*args, **kwargs, metadata=metadata)
+ return attr.ib(*args, metadata=metadata, **kwargs)
|
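The breakage on Python 3.3/3.4 is call syntax, not attrs itself: a keyword argument after `**kwargs` unpacking, as in `attr.ib(*args, **kwargs, metadata=metadata)`, is only legal from Python 3.5 onward (PEP 448); putting `metadata=metadata` before `**kwargs` parses everywhere. A minimal reproduction that does not need attrs:

def call(*args, **kwargs):
    return args, kwargs

extra = {"default": 0}

# Accepted on Python 2.7 and every Python 3 release:
print(call(1, 2, metadata={"related": "x"}, **extra))

# The pre-fix form, call(1, 2, **extra, metadata=...), is a SyntaxError
# before Python 3.5, where PEP 448 generalized unpacking landed.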
c32bdff4b0ee570ed58cd869830d89e3251cf82a
|
pytils/test/__init__.py
|
pytils/test/__init__.py
|
# -*- coding: utf-8 -*-
"""
Unit tests for pytils
"""
__all__ = ["test_numeral", "test_dt", "test_translit", "test_utils", "test_typo"]
import unittest
def get_django_suite():
try:
import django
except ImportError:
return unittest.TestSuite()
import pytils.test.templatetags
return pytils.test.templatetags.get_suite()
def get_suite():
"""Return TestSuite for all unit-test of pytils"""
suite = unittest.TestSuite()
for module_name in __all__:
imported_module = __import__("pytils.test."+module_name,
globals(),
locals(),
["pytils.test"])
loader = unittest.defaultTestLoader
suite.addTest(loader.loadTestsFromModule(imported_module))
suite.addTest(get_django_suite())
return suite
def run_tests_from_module(module, verbosity=1):
"""Run unit-tests for single module"""
suite = unittest.TestSuite()
loader = unittest.defaultTestLoader
suite.addTest(loader.loadTestsFromModule(module))
unittest.TextTestRunner(verbosity=verbosity).run(suite)
def run(verbosity=1):
"""Run all unit-test of pytils"""
suite = get_suite()
unittest.TextTestRunner(verbosity=verbosity).run(suite)
if __name__ == '__main__':
run(2)
|
# -*- coding: utf-8 -*-
"""
Unit tests for pytils
"""
__all__ = ["test_numeral", "test_dt", "test_translit", "test_utils", "test_typo"]
import unittest
import sys
def get_django_suite():
try:
import django
except ImportError:
return unittest.TestSuite()
import pytils.test.templatetags
return pytils.test.templatetags.get_suite()
def get_suite():
"""Return TestSuite for all unit-test of pytils"""
suite = unittest.TestSuite()
for module_name in __all__:
imported_module = __import__("pytils.test."+module_name,
globals(),
locals(),
["pytils.test"])
loader = unittest.defaultTestLoader
suite.addTest(loader.loadTestsFromModule(imported_module))
suite.addTest(get_django_suite())
return suite
def run_tests_from_module(module, verbosity=1):
"""Run unit-tests for single module"""
suite = unittest.TestSuite()
loader = unittest.defaultTestLoader
suite.addTest(loader.loadTestsFromModule(module))
unittest.TextTestRunner(verbosity=verbosity).run(suite)
def run(verbosity=1):
"""Run all unit-test of pytils"""
suite = get_suite()
res = unittest.TextTestRunner(verbosity=verbosity).run(suite)
if res.errors or res.failures:
sys.exit(1)
if __name__ == '__main__':
run(2)
|
Exit with non-0 status if there are failed tests or errors.
|
Py3: Exit with non-0 status if there are failed tests or errors.
|
Python
|
mit
|
Forever-Young/pytils,j2a/pytils
|
---
+++
@@ -5,6 +5,7 @@
__all__ = ["test_numeral", "test_dt", "test_translit", "test_utils", "test_typo"]
import unittest
+import sys
def get_django_suite():
try:
@@ -40,7 +41,9 @@
def run(verbosity=1):
"""Run all unit-test of pytils"""
suite = get_suite()
- unittest.TextTestRunner(verbosity=verbosity).run(suite)
+ res = unittest.TextTestRunner(verbosity=verbosity).run(suite)
+ if res.errors or res.failures:
+ sys.exit(1)
if __name__ == '__main__':
run(2)
|
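TextTestRunner.run() returns a TestResult, and inspecting its errors and failures lists is what lets the script report a non-zero exit status to CI. The standard library also provides result.wasSuccessful(), which folds both checks (plus unexpected successes) into one call; a standalone sketch of that variant, not tied to pytils:

import sys
import unittest

def run(suite, verbosity=1):
    result = unittest.TextTestRunner(verbosity=verbosity).run(suite)
    if not result.wasSuccessful():  # covers failures and errors
        sys.exit(1)

if __name__ == "__main__":
    run(unittest.defaultTestLoader.discover("."))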
7e25472dab7732dc76bfb81d720946c18811962f
|
src/appengine/driver.py
|
src/appengine/driver.py
|
"""List drivers and send them commands."""
import logging
import flask
from appengine import device, rest
class Query(object):
def iter(self):
for name, cls in device.DEVICE_TYPES.iteritems():
yield Driver(name, cls)
class Driver(object):
"""This is a fake for compatibility with the rest module"""
def __init__(self, name, cls):
self._name = name
self._cls = cls
def to_dict(self):
return {'name': self._name}
# This is a trampoline through to the driver
# mainly for commands
def __getattr__(self, name):
func = getattr(self._cls, name)
if func is None or not getattr(func, 'is_static', False):
logging.error('Command %s does not exist or is not a static command',
name)
flask.abort(400)
return func
@staticmethod
def query():
return Query()
@staticmethod
def get_by_id(_id):
return Driver(_id, device.DEVICE_TYPES[_id])
# pylint: disable=invalid-name
blueprint = flask.Blueprint('driver', __name__)
rest.register_class(blueprint, Driver, None)
|
"""List drivers and send them commands."""
import logging
import flask
from appengine import device, rest
class Query(object):
def iter(self):
for name, cls in device.DEVICE_TYPES.iteritems():
yield Driver(name, cls)
class Driver(object):
"""This is a fake for compatibility with the rest module"""
def __init__(self, name, cls):
self._name = name
self._cls = cls
def to_dict(self):
return {'name': self._name}
# This is a trampoline through to the driver
# mainly for commands
def __getattr__(self, name):
func = getattr(self._cls, name)
if func is None or not getattr(func, 'is_static', False):
logging.error('Command %s does not exist or is not a static command',
name)
flask.abort(400)
return func
@staticmethod
def put():
pass
@staticmethod
def query():
return Query()
@staticmethod
def get_by_id(_id):
return Driver(_id, device.DEVICE_TYPES[_id])
# pylint: disable=invalid-name
blueprint = flask.Blueprint('driver', __name__)
rest.register_class(blueprint, Driver, None)
|
Fix 'put is not a command' error on static commands
|
Fix 'put is not a command' error on static commands
|
Python
|
mit
|
tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation
|
---
+++
@@ -33,6 +33,10 @@
return func
@staticmethod
+ def put():
+ pass
+
+ @staticmethod
def query():
return Query()
|
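The fix works because of Python's attribute lookup order: __getattr__ is only consulted when normal lookup fails, so once put() exists on the class the trampoline (and its "not a static command" rejection) is never reached for rest-module calls to put. A tiny illustration of that rule, independent of the appengine code:

class Proxy(object):
    def __getattr__(self, name):
        # Called only when normal attribute lookup fails.
        raise AttributeError("%s is not a command" % name)

    def put(self):
        # Defined on the class, so __getattr__ is bypassed for it.
        return None

p = Proxy()
p.put()        # fine: found on the class
# p.missing()  # would raise AttributeError via __getattr__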
a54933f5fb5e958c890839c58fcba4e658c8e2a0
|
bitbots_head_behavior/scripts/testHeadBehaviour.py
|
bitbots_head_behavior/scripts/testHeadBehaviour.py
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
import rospy
from humanoid_league_msgs.msg import BallInImage, BallRelative, BallsInImage
from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
def run():
pub_ball = rospy.Publisher("ball_in_image", BallsInImage, queue_size=1)
pub_hmg = rospy.Publisher("head_motor_goals", JointTrajectory, queue_size=1)
hmg = JointTrajectory()
goal = JointTrajectoryPoint()
goal.positions = [0, 0]
goal.velocities = [0, 0]
hmg.points = [goal]
counter = 320
direction = 1
rospy.loginfo("Create Test")
rospy.init_node("bitbots_testHeadBehaviour")
pub_hmg.publish(hmg)
rate = rospy.Rate(4)
rospy.logdebug("Laeuft...")
while not rospy.is_shutdown():
# Ball in Image
ball = BallInImage()
ball.center.x = counter
if(counter > 340 or counter < 300):
direction *= -1
counter += direction
else:
counter += direction
ball.center.y = 200
ball.diameter = 10
ball.confidence = 1
balls = BallsInImage()
balls.candidates.append(ball)
pub_ball.publish(balls)
rospy.loginfo("Published ball: %s" % counter)
rate.sleep()
if __name__ == "__main__":
run()
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
import rospy
from humanoid_league_msgs.msg import BallInImage, BallRelative, BallInImageArray
from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
def run():
pub_ball = rospy.Publisher("ball_in_image", BallsInImage, queue_size=1)
pub_hmg = rospy.Publisher("head_motor_goals", JointTrajectory, queue_size=1)
hmg = JointTrajectory()
goal = JointTrajectoryPoint()
goal.positions = [0, 0]
goal.velocities = [0, 0]
hmg.points = [goal]
counter = 320
direction = 1
rospy.loginfo("Create Test")
rospy.init_node("bitbots_testHeadBehaviour")
pub_hmg.publish(hmg)
rate = rospy.Rate(4)
rospy.logdebug("Laeuft...")
while not rospy.is_shutdown():
# Ball in Image
ball = BallInImage()
ball.center.x = counter
if(counter > 340 or counter < 300):
direction *= -1
counter += direction
else:
counter += direction
ball.center.y = 200
ball.diameter = 10
ball.confidence = 1
balls = BallInImageArray()
balls.candidates.append(ball)
pub_ball.publish(balls)
rospy.loginfo("Published ball: %s" % counter)
rate.sleep()
if __name__ == "__main__":
run()
|
Test Head Behavior: Apply new HLM
|
Test Head Behavior: Apply new HLM
|
Python
|
bsd-3-clause
|
bit-bots/bitbots_behaviour
|
---
+++
@@ -2,7 +2,7 @@
# -*- coding:utf-8 -*-
import rospy
-from humanoid_league_msgs.msg import BallInImage, BallRelative, BallsInImage
+from humanoid_league_msgs.msg import BallInImage, BallRelative, BallInImageArray
from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
@@ -19,7 +19,7 @@
counter = 320
direction = 1
-
+
rospy.loginfo("Create Test")
rospy.init_node("bitbots_testHeadBehaviour")
pub_hmg.publish(hmg)
@@ -38,7 +38,7 @@
ball.center.y = 200
ball.diameter = 10
ball.confidence = 1
- balls = BallsInImage()
+ balls = BallInImageArray()
balls.candidates.append(ball)
pub_ball.publish(balls)
|
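The commit tracks a message rename in humanoid_league_msgs (BallsInImage became BallInImageArray) in both the import and the object that gets published. In the stored new_contents the rospy.Publisher call still names BallsInImage, and the diff does not touch that line, so the declared publisher type presumably needed the same rename. A short sketch of one way to keep the two in sync, assuming a ROS environment with the renamed message available:

import rospy
from humanoid_league_msgs.msg import BallInImageArray

rospy.init_node("ball_publisher_example")
# Binding the message class to a single name keeps the Publisher's declared
# type and the constructed message consistent after a rename.
BallMsg = BallInImageArray
pub_ball = rospy.Publisher("ball_in_image", BallMsg, queue_size=1)
pub_ball.publish(BallMsg())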
14d0e3b887b469c2b1979352804d8ade3184ef18
|
scripts/symlinks/parent/foogroup.py
|
scripts/symlinks/parent/foogroup.py
|
#!/usr/bin/env python
import json
print json.dumps({
"_meta": {
"hostvars": {
'afoo': {}
},
},
"foo": {
"hosts": ['afoo']
}
})
|
#!/usr/bin/env python
import json
print(json.dumps({
"_meta": {
"hostvars": {
'afoo': {}
},
},
"foo": {
"hosts": ['afoo']
}
}))
|
Fix print statement to be py3 compatible
|
Fix print statement to be py3 compatible
|
Python
|
mit
|
AlanCoding/Ansible-inventory-file-examples,AlanCoding/Ansible-inventory-file-examples
|
---
+++
@@ -1,7 +1,7 @@
#!/usr/bin/env python
import json
-print json.dumps({
+print(json.dumps({
"_meta": {
"hostvars": {
'afoo': {}
@@ -10,4 +10,4 @@
"foo": {
"hosts": ['afoo']
}
-})
+}))
|
24e80d80034084f6d2067df39fdc070e4eb41447
|
diceclient.py
|
diceclient.py
|
from twisted.internet import reactor, defer
from twisted.internet.protocol import ClientCreator
from twisted.protocols import amp
from ampserver import Sum, Divide
def doMath():
d1 = ClientCreator(reactor, amp.AMP).connectTCP(
'127.0.0.1', 1234).addCallback(
lambda p: p.callRemote(Sum, a=13, b=81)).addCallback(
lambda result: result['total'])
def trapZero(result):
result.trap(ZeroDivisionError)
print "Divided by zero: returning INF"
return 1e1000
d2 = ClientCreator(reactor, amp.AMP).connectTCP(
'127.0.0.1', 1234).addCallback(
lambda p: p.callRemote(Divide, numerator=1234,
denominator=0)).addErrback(trapZero)
def done(result):
print 'Done with math:', result
defer.DeferredList([d1, d2]).addCallback(done)
if __name__ == '__main__':
doMath()
reactor.run()
|
from twisted.internet import reactor, defer
from twisted.internet.protocol import ClientCreator
from twisted.protocols import amp
from diceserver import Sum, Divide
def doMath():
d1 = ClientCreator(reactor, amp.AMP).connectTCP(
'127.0.0.1', 1234).addCallback(
lambda p: p.callRemote(Sum, a=13, b=81)).addCallback(
lambda result: result['total'])
def trapZero(result):
result.trap(ZeroDivisionError)
print "Divided by zero: returning INF"
return 1e1000
d2 = ClientCreator(reactor, amp.AMP).connectTCP(
'127.0.0.1', 1234).addCallback(
lambda p: p.callRemote(Divide, numerator=1234,
denominator=0)).addErrback(trapZero)
def done(result):
print 'Done with math:', result
defer.DeferredList([d1, d2]).addCallback(done)
if __name__ == '__main__':
doMath()
reactor.run()
|
Fix import path to match rename
|
Fix import path to match rename
|
Python
|
mit
|
dripton/ampchat
|
---
+++
@@ -1,7 +1,7 @@
from twisted.internet import reactor, defer
from twisted.internet.protocol import ClientCreator
from twisted.protocols import amp
-from ampserver import Sum, Divide
+from diceserver import Sum, Divide
def doMath():
|
14dd9f6cab99be6832ab98291337f4d38faae936
|
fellowms/forms.py
|
fellowms/forms.py
|
from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
class Meta:
model = Fellow
exclude = [
"home_lon",
"home_lat",
"inauguration_year",
"funding_notes",
"mentor",
]
class EventForm(ModelForm):
class Meta:
model = Event
exclude = [
"status",
"budget_approve",
"report_url",
]
# We don't want to expose fellows' data
# so we will request the email
# and match on the database.
labels = {
'fellow': 'Fellow',
'url': "Event's homepage url",
'name': "Event's name",
}
class ExpenseForm(ModelForm):
class Meta:
model = Expense
exclude = [
'id',
'status',
]
class BlogForm(ModelForm):
class Meta:
model = Blog
exclude = [
'status',
]
|
from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
class Meta:
model = Fellow
exclude = [
"user",
"home_lon",
"home_lat",
"inauguration_year",
"funding_notes",
"mentor",
]
class EventForm(ModelForm):
class Meta:
model = Event
exclude = [
"status",
"budget_approve",
"report_url",
]
# We don't want to expose fellows' data
# so we will request the email
# and match on the database.
labels = {
'fellow': 'Fellow',
'url': "Event's homepage url",
'name': "Event's name",
}
class ExpenseForm(ModelForm):
class Meta:
model = Expense
exclude = [
'id',
'status',
]
class BlogForm(ModelForm):
class Meta:
model = Blog
exclude = [
'status',
]
|
Exclude user field from form
|
Exclude user field from form
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
---
+++
@@ -6,6 +6,7 @@
class Meta:
model = Fellow
exclude = [
+ "user",
"home_lon",
"home_lat",
"inauguration_year",
|
785208c904caacd69cb98f9ea44ee9f720752baf
|
src/tmlib/imextract/argparser.py
|
src/tmlib/imextract/argparser.py
|
'''
Arguments of the command line program.
'''
from . import __version__
from .cli import Imextract
from .args import ImextractInitArgs
parser, subparsers = Imextract.get_parser_and_subparsers(
required_subparsers=['init', 'run', 'submit', 'cleanup'])
parser.description = '''
Extract images from heterogeneous microscopic image file formats
and store each 2D plane in a separate PNG file.
'''
parser.version = __version__
init_parser = subparsers.choices['init']
init_extra_group = init_parser.add_argument_group(
'additional program-specific arguments')
ImextractInitArgs().add_to_argparser(init_extra_group)
for name in subparsers.choices:
subparsers.choices[name].set_defaults(handler=Imextract.call)
|
'''
Arguments of the command line program.
'''
from . import __version__
from .cli import Imextract
from .args import ImextractInitArgs
parser, subparsers = Imextract.get_parser_and_subparsers(
required_subparsers=['init', 'run', 'submit', 'cleanup', 'log'])
parser.description = '''
Extract images from heterogeneous microscopic image file formats
and store each 2D plane in a separate PNG file.
'''
parser.version = __version__
init_parser = subparsers.choices['init']
init_extra_group = init_parser.add_argument_group(
'additional program-specific arguments')
ImextractInitArgs().add_to_argparser(init_extra_group)
for name in subparsers.choices:
subparsers.choices[name].set_defaults(handler=Imextract.call)
|
Fix bug in imextract argument parser module
|
Fix bug in imextract argument parser module
|
Python
|
agpl-3.0
|
TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary
|
---
+++
@@ -8,7 +8,7 @@
parser, subparsers = Imextract.get_parser_and_subparsers(
- required_subparsers=['init', 'run', 'submit', 'cleanup'])
+ required_subparsers=['init', 'run', 'submit', 'cleanup', 'log'])
parser.description = '''
Extract images from heterogeneous microscopic image file formats
|
629bfe7ba928bc9650217b90190409708740ee82
|
lib/cretonne/meta/isa/intel/defs.py
|
lib/cretonne/meta/isa/intel/defs.py
|
"""
Intel definitions.
Commonly used definitions.
"""
from __future__ import absolute_import
from cdsl.isa import TargetISA, CPUMode
import base.instructions
from . import instructions as x86
ISA = TargetISA('intel', [base.instructions.GROUP, x86.GROUP])
# CPU modes for 32-bit and 64-bit operation.
I32 = CPUMode('I32', ISA)
I64 = CPUMode('I64', ISA)
|
"""
Intel definitions.
Commonly used definitions.
"""
from __future__ import absolute_import
from cdsl.isa import TargetISA, CPUMode
import base.instructions
from . import instructions as x86
ISA = TargetISA('intel', [base.instructions.GROUP, x86.GROUP])
# CPU modes for 32-bit and 64-bit operation.
I64 = CPUMode('I64', ISA)
I32 = CPUMode('I32', ISA)
|
Define I64 before I32 for better encoding table compression.
|
Define I64 before I32 for better encoding table compression.
The encoding list compression algorithm is not the sharpest knife in the
drawer. It can reuse subsets of I64 encoding lists for I32 instructions,
but only when the I64 lists are defined first.
With this change and the previous change to the encoding list format, we
get the following table sizes for the Intel ISA:
ENCLISTS: 1478 B -> 662 B
LEVEL2: 1072 B (unchanged)
LEVEL1: 32 B -> 48 B
Total: 2582 B -> 1782 B (-31%)
|
Python
|
apache-2.0
|
sunfishcode/cretonne,stoklund/cretonne,sunfishcode/cretonne,stoklund/cretonne,stoklund/cretonne,sunfishcode/cretonne
|
---
+++
@@ -11,5 +11,5 @@
ISA = TargetISA('intel', [base.instructions.GROUP, x86.GROUP])
# CPU modes for 32-bit and 64-bit operation.
+I64 = CPUMode('I64', ISA)
I32 = CPUMode('I32', ISA)
-I64 = CPUMode('I64', ISA)
|
d028f66964249bab928a29d92ab4cff075352546
|
integration/main.py
|
integration/main.py
|
from spec import Spec, skip
class Tessera(Spec):
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
|
from contextlib import contextmanager
import os
from shutil import rmtree
from tempfile import mkdtemp
from spec import Spec, skip
@contextmanager
def _tmp():
try:
tempdir = mkdtemp()
yield tempdir
finally:
rmtree(tempdir)
@contextmanager
def _db():
with _tmp() as tempdir:
from tessera import app, db
# Temp db location
path = os.path.join(tempdir, 'tessera.db')
dbfile = 'sqlite:///{0}'.format(path)
# Inform app of that location & setup
app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
db.create_all()
# Let test have its way with that temp db
yield db
class Config(object):
pass
def _config(**options):
config = Config()
for key, value in options.iteritems():
setattr(config, key, value)
class Tessera(Spec):
def is_importable(self):
import tessera
assert tessera.app
assert tessera.db
def creates_a_nonempty_database_schema(self):
with _db() as db:
meta = db.MetaData()
meta.reflect(db.engine)
assert len(meta.tables) > 0
|
Add temp DB test harness + basic test
|
Add temp DB test harness + basic test
|
Python
|
apache-2.0
|
tessera-metrics/tessera,jmptrader/tessera,aalpern/tessera,Slach/tessera,filippog/tessera,aalpern/tessera,aalpern/tessera,section-io/tessera,urbanairship/tessera,aalpern/tessera,urbanairship/tessera,Slach/tessera,jmptrader/tessera,urbanairship/tessera,Slach/tessera,urbanairship/tessera,urbanairship/tessera,tessera-metrics/tessera,section-io/tessera,aalpern/tessera,tessera-metrics/tessera,filippog/tessera,tessera-metrics/tessera,section-io/tessera,jmptrader/tessera,jmptrader/tessera,filippog/tessera,section-io/tessera,Slach/tessera,jmptrader/tessera,tessera-metrics/tessera
|
---
+++
@@ -1,4 +1,40 @@
+from contextlib import contextmanager
+import os
+from shutil import rmtree
+from tempfile import mkdtemp
+
from spec import Spec, skip
+
+
+@contextmanager
+def _tmp():
+ try:
+ tempdir = mkdtemp()
+ yield tempdir
+ finally:
+ rmtree(tempdir)
+
+@contextmanager
+def _db():
+ with _tmp() as tempdir:
+ from tessera import app, db
+ # Temp db location
+ path = os.path.join(tempdir, 'tessera.db')
+ dbfile = 'sqlite:///{0}'.format(path)
+ # Inform app of that location & setup
+ app.config.from_object(_config(SQLALCHEMY_DATABASE_URI=dbfile))
+ db.create_all()
+ # Let test have its way with that temp db
+ yield db
+
+
+class Config(object):
+ pass
+
+def _config(**options):
+ config = Config()
+ for key, value in options.iteritems():
+ setattr(config, key, value)
class Tessera(Spec):
@@ -6,3 +42,9 @@
import tessera
assert tessera.app
assert tessera.db
+
+ def creates_a_nonempty_database_schema(self):
+ with _db() as db:
+ meta = db.MetaData()
+ meta.reflect(db.engine)
+ assert len(meta.tables) > 0
|
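The new harness builds a throwaway SQLite database inside a temporary directory, points the app's SQLALCHEMY_DATABASE_URI at it, runs create_all(), and removes the directory afterwards. One detail worth flagging: as recorded, _config() assigns attributes to a Config instance but never returns it, so app.config.from_object() would receive None; presumably `return config` was intended. A standalone sketch of the temp-directory part, using only the standard library:

import os
from contextlib import contextmanager
from shutil import rmtree
from tempfile import mkdtemp

@contextmanager
def temp_sqlite_uri():
    tempdir = mkdtemp()
    try:
        # e.g. sqlite:////tmp/tmpabc123/tessera.db
        yield "sqlite:///{0}".format(os.path.join(tempdir, "tessera.db"))
    finally:
        rmtree(tempdir)

with temp_sqlite_uri() as uri:
    print(uri)  # hand this to SQLALCHEMY_DATABASE_URI before create_all()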
1100830d3b48262dd9b94d96eb50d75c8ff69fe4
|
Cogs/Emoji.py
|
Cogs/Emoji.py
|
import discord
from discord.ext import commands
from Cogs import GetImage
def setup(bot):
bot.add_cog(Emoji(bot))
class Emoji(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
async def emoji(self, ctx, emoji = None):
'''Outputs your CUSTOM emoji... but bigger! (Does not work with standard discord emojis)'''
emojiparts = emoji.replace("<","").replace(">","").split(":") if emoji else []
if not len(emojiparts) == 3:
await ctx.send("Usage: `{}emoji [emoji]` - must be a CUSTOM emoji, and not just the name.".format(ctx.prefix))
return
emoji_obj = discord.PartialEmoji(animated=len(emojiparts[0]) > 0, name=emojiparts[1], id=emojiparts[2])
if not emoji_obj.url:
await ctx.send("Could not find url for emoji :(")
return
f = await GetImage.download(emoji_obj.url)
if not f:
await ctx.send("I couldn't get that emoji :(")
return
await ctx.send(file=discord.File(f))
|
import discord
from discord.ext import commands
from Cogs import GetImage
def setup(bot):
bot.add_cog(Emoji(bot))
class Emoji(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command()
async def emoji(self, ctx, emoji = None):
'''Outputs the passed emoji... but bigger!'''
if emoji is None:
await ctx.send("Usage: `{}emoji [emoji]`".format(ctx.prefix))
return
if len(emoji) < 3:
# Try to get just the unicode
h = "-".join([hex(ord(x)).lower()[2:] for x in emoji])
url = "https://raw.githubusercontent.com/twitter/twemoji/gh-pages/2/72x72/{}.png".format(h)
f = await GetImage.download(url)
if not f:
await ctx.send("I couldn't get that emoji :(")
else:
await ctx.send(file=discord.File(f))
return
emojiparts = emoji.replace("<","").replace(">","").split(":") if emoji else []
if not len(emojiparts) == 3:
await ctx.send("Usage: `{}emoji [emoji]`".format(ctx.prefix))
return
emoji_obj = discord.PartialEmoji(animated=len(emojiparts[0]) > 0, name=emojiparts[1], id=emojiparts[2])
if not emoji_obj.url:
await ctx.send("Could not find a url for that emoji :(")
return
f = await GetImage.download(emoji_obj.url)
if not f:
await ctx.send("I couldn't get that emoji :(")
return
await ctx.send(file=discord.File(f))
|
Add support for built-in emojis
|
Add support for built-in emojis
|
Python
|
mit
|
corpnewt/CorpBot.py,corpnewt/CorpBot.py
|
---
+++
@@ -12,14 +12,27 @@
@commands.command()
async def emoji(self, ctx, emoji = None):
- '''Outputs your CUSTOM emoji... but bigger! (Does not work with standard discord emojis)'''
+ '''Outputs the passed emoji... but bigger!'''
+ if emoji is None:
+ await ctx.send("Usage: `{}emoji [emoji]`".format(ctx.prefix))
+ return
+ if len(emoji) < 3:
+ # Try to get just the unicode
+ h = "-".join([hex(ord(x)).lower()[2:] for x in emoji])
+ url = "https://raw.githubusercontent.com/twitter/twemoji/gh-pages/2/72x72/{}.png".format(h)
+ f = await GetImage.download(url)
+ if not f:
+ await ctx.send("I couldn't get that emoji :(")
+ else:
+ await ctx.send(file=discord.File(f))
+ return
emojiparts = emoji.replace("<","").replace(">","").split(":") if emoji else []
if not len(emojiparts) == 3:
- await ctx.send("Usage: `{}emoji [emoji]` - must be a CUSTOM emoji, and not just the name.".format(ctx.prefix))
+ await ctx.send("Usage: `{}emoji [emoji]`".format(ctx.prefix))
return
emoji_obj = discord.PartialEmoji(animated=len(emojiparts[0]) > 0, name=emojiparts[1], id=emojiparts[2])
if not emoji_obj.url:
- await ctx.send("Could not find url for emoji :(")
+ await ctx.send("Could not find a url for that emoji :(")
return
f = await GetImage.download(emoji_obj.url)
if not f:
|
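For standard emoji the command maps each character to its lowercase hex code point, joins them with '-', and fetches the matching Twemoji 72x72 PNG; the `len(emoji) < 3` test is a heuristic, since custom-emoji strings like <:name:id> are always longer. A minimal sketch of the code-point-to-URL step (the filename scheme is assumed from the URL used in the command above, and multi-codepoint sequences may need extra handling):

def twemoji_url(emoji):
    """Build the assumed Twemoji 72x72 PNG URL for a unicode emoji."""
    codepoints = "-".join(hex(ord(ch))[2:].lower() for ch in emoji)
    return ("https://raw.githubusercontent.com/twitter/twemoji/"
            "gh-pages/2/72x72/{}.png".format(codepoints))

print(twemoji_url(u"\U0001F600"))  # ...gh-pages/2/72x72/1f600.png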
6464028097b13b5d03969c20bae56f9f70acbbd1
|
saleor/cart/middleware.py
|
saleor/cart/middleware.py
|
from __future__ import unicode_literals
from . import SessionCart, CART_SESSION_KEY
class CartMiddleware(object):
'''
Saves the cart instance into the django session.
'''
def process_request(self, request):
try:
cart_data = request.session[CART_SESSION_KEY]
cart = SessionCart.from_storage(cart_data)
except KeyError:
cart = SessionCart()
setattr(request, 'cart', cart)
def process_response(self, request, response):
if hasattr(request, 'cart'):
request.session[CART_SESSION_KEY] = request.cart.for_storage()
return response
|
from __future__ import unicode_literals
from . import SessionCart, CART_SESSION_KEY
class CartMiddleware(object):
'''
Saves the cart instance into the django session.
'''
def process_request(self, request):
try:
cart_data = request.session[CART_SESSION_KEY]
cart = SessionCart.from_storage(cart_data)
except KeyError:
cart = SessionCart()
setattr(request, 'cart', cart)
def process_response(self, request, response):
if hasattr(request, 'cart') and request.cart.modified:
request.session[CART_SESSION_KEY] = request.cart.for_storage()
return response
|
Store cart in session only when it was modified
|
Store cart in session only when it was modified
|
Python
|
bsd-3-clause
|
HyperManTT/ECommerceSaleor,taedori81/saleor,car3oon/saleor,UITools/saleor,rodrigozn/CW-Shop,mociepka/saleor,spartonia/saleor,UITools/saleor,arth-co/saleor,UITools/saleor,paweltin/saleor,hongquan/saleor,Drekscott/Motlaesaleor,UITools/saleor,avorio/saleor,josesanch/saleor,tfroehlich82/saleor,maferelo/saleor,spartonia/saleor,dashmug/saleor,taedori81/saleor,HyperManTT/ECommerceSaleor,Drekscott/Motlaesaleor,UITools/saleor,jreigel/saleor,josesanch/saleor,dashmug/saleor,maferelo/saleor,paweltin/saleor,arth-co/saleor,Drekscott/Motlaesaleor,KenMutemi/saleor,car3oon/saleor,mociepka/saleor,taedori81/saleor,dashmug/saleor,laosunhust/saleor,taedori81/saleor,jreigel/saleor,josesanch/saleor,itbabu/saleor,hongquan/saleor,rchav/vinerack,laosunhust/saleor,paweltin/saleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,avorio/saleor,rchav/vinerack,arth-co/saleor,hongquan/saleor,arth-co/saleor,mociepka/saleor,jreigel/saleor,laosunhust/saleor,spartonia/saleor,Drekscott/Motlaesaleor,itbabu/saleor,tfroehlich82/saleor,rodrigozn/CW-Shop,KenMutemi/saleor,paweltin/saleor,maferelo/saleor,rchav/vinerack,avorio/saleor,spartonia/saleor,tfroehlich82/saleor,itbabu/saleor,rodrigozn/CW-Shop,laosunhust/saleor,avorio/saleor,car3oon/saleor
|
---
+++
@@ -17,6 +17,6 @@
setattr(request, 'cart', cart)
def process_response(self, request, response):
- if hasattr(request, 'cart'):
+ if hasattr(request, 'cart') and request.cart.modified:
request.session[CART_SESSION_KEY] = request.cart.for_storage()
return response
|
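Writing the cart back to request.session only when it reports itself modified avoids re-serializing it (and touching the session backend) on every request. This assumes SessionCart exposes a `modified` flag that its mutating methods set, which the stored file does not show. A generic dirty-flag sketch, not Saleor's actual implementation:

class DirtyFlagCart(object):
    def __init__(self, items=None):
        self.items = list(items or [])
        self.modified = False  # dirty flag

    def add(self, product, quantity=1):
        self.items.append((product, quantity))
        self.modified = True   # any mutation marks the cart dirty

    def for_storage(self):
        return {"items": self.items}

cart = DirtyFlagCart()
cart.add("book")
if cart.modified:
    stored = cart.for_storage()  # persist only when something changed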
3dfa781ce8e073f40eda3d80794ad1caff5d5920
|
samples/migrateAccount.py
|
samples/migrateAccount.py
|
#### Migrate person to a new account within the same Org
# Requires admin role
# Useful when migrating to Enterprise Logins.
# Reassigns all items/groups to new owner and
# adds userTo to all groups which userFrom is a member.'''
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
# for migrating a single account...
myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
# for migrating a batch of accounts...
myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
|
#### Migrate a member to a new account within the same Organization
# Requires admin role
# Useful when migrating to Enterprise Logins
# Reassigns all items/groups to new owner
# Adds userTo to all groups which userFrom is a member
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
# un-comment one of the lines below, depending on which workflow you wish to use
### for migrating a single account...
# myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
### for migrating multiple accounts...
# myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
|
Enhance comments in Migrate Account sample
|
Enhance comments in Migrate Account sample
|
Python
|
apache-2.0
|
oevans/ago-tools
|
---
+++
@@ -1,15 +1,17 @@
-#### Migrate person to a new account within the same Org
+#### Migrate a member to a new account within the same Organization
# Requires admin role
-# Useful when migrating to Enterprise Logins.
-# Reassigns all items/groups to new owner and
-# adds userTo to all groups which userFrom is a member.'''
+# Useful when migrating to Enterprise Logins
+# Reassigns all items/groups to new owner
+# Adds userTo to all groups which userFrom is a member
from agoTools.admin import Admin
myAgol = Admin('<username>') # Replace <username> your ADMIN account
-# for migrating a single account...
-myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
+# un-comment one of the lines below, depending on which workflow you wish to use
-# for migrating a batch of accounts...
-myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
+### for migrating a single account...
+# myAgol.migrateAccount(myAgol, '<userFrom>', '<userTo>') # Replace with usernames between which you are moving items
+
+### for migrating multiple accounts...
+# myAgol.migrateAccounts(myAgol, <path to user mapping CSV>) # Replace with path to CSV file with col1=userFrom, col2=userTo
|
a90c2eecf95323a6f968e1313c3d7852e4eb25b2
|
speeches/management/commands/populatespeakers.py
|
speeches/management/commands/populatespeakers.py
|
from django.core.management.base import NoArgsCommand
from django.conf import settings
from popit import PopIt
from speeches.models import Speaker
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
api = PopIt(instance = settings.POPIT_INSTANCE,
hostname = settings.POPIT_HOSTNAME,
api_version = settings.POPIT_API_VERSION)
results = api.person.get()
for person in results['results']:
speaker, created = Speaker.objects.get_or_create(popit_id=person['_id'])
# we ignore created for now, just always set the name
speaker.name = person['name']
speaker.save();
|
import logging
from django.core.management.base import NoArgsCommand
from django.conf import settings
from popit import PopIt
from speeches.models import Speaker
logger = logging.getLogger(__name__)
class Command(NoArgsCommand):
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
api = PopIt(instance = settings.POPIT_INSTANCE,
hostname = settings.POPIT_HOSTNAME,
api_version = settings.POPIT_API_VERSION)
results = api.person.get()
for person in results['results']:
logger.warn('Processing: {0}'.format(person['meta']['api_url']))
speaker, created = Speaker.objects.get_or_create(popit_url=person['meta']['api_url'])
logger.warn('Person was created? {0}'.format(created))
logger.warn('Persons id in the spoke db is: {0}'.format(speaker.id))
# we ignore created for now, just always set the name
speaker.name = person['name']
speaker.save();
|
Update speaker population command to set popit_url instead of popit_id
|
Update speaker population command to set popit_url instead of popit_id
|
Python
|
agpl-3.0
|
opencorato/sayit,opencorato/sayit,opencorato/sayit,opencorato/sayit
|
---
+++
@@ -1,9 +1,13 @@
+import logging
from django.core.management.base import NoArgsCommand
from django.conf import settings
from popit import PopIt
from speeches.models import Speaker
+logger = logging.getLogger(__name__)
+
class Command(NoArgsCommand):
+
help = 'Populates the database with people from Popit'
def handle_noargs(self, **options):
@@ -12,7 +16,14 @@
api_version = settings.POPIT_API_VERSION)
results = api.person.get()
for person in results['results']:
- speaker, created = Speaker.objects.get_or_create(popit_id=person['_id'])
+
+ logger.warn('Processing: {0}'.format(person['meta']['api_url']))
+
+ speaker, created = Speaker.objects.get_or_create(popit_url=person['meta']['api_url'])
+
+ logger.warn('Person was created? {0}'.format(created))
+ logger.warn('Persons id in the spoke db is: {0}'.format(speaker.id))
+
# we ignore created for now, just always set the name
speaker.name = person['name']
speaker.save();
|
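The command now keys speakers on the PopIt API URL via get_or_create(popit_url=...) instead of the raw _id. The added logging uses logger.warn(), a deprecated alias of logger.warning(), and the messages read more like progress output, so INFO is arguably the better level. A small sketch of the same upsert pattern with conventional levels; the Speaker model and person fields are taken as given from the command above:

import logging

logger = logging.getLogger(__name__)

def sync_person(Speaker, person):
    """Create or update one speaker keyed on its PopIt API URL."""
    api_url = person["meta"]["api_url"]
    speaker, created = Speaker.objects.get_or_create(popit_url=api_url)
    logger.info("Processing %s (created=%s)", api_url, created)
    speaker.name = person["name"]
    speaker.save()
    return speaker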
a5ef9a5d141ba5fd0d1d6c983cd8ac82079a1782
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/env python3
import os
import tempfile
from distutils.sysconfig import get_python_lib
from coalib.tests.TestHelper import TestHelper
if __name__ == '__main__':
parser = TestHelper.create_argparser(description="Runs coalas tests.")
parser.add_argument("-b",
"--ignore-bear-tests",
help="ignore bear tests",
action="store_true")
parser.add_argument("-m",
"--ignore-main-tests",
help="ignore main program tests",
action="store_true")
testhelper = TestHelper(parser)
if not testhelper.args.ignore_main_tests:
testhelper.add_test_files(os.path.abspath(os.path.join("coalib",
"tests")))
if not testhelper.args.ignore_bear_tests:
testhelper.add_test_files(os.path.abspath(os.path.join("bears",
"tests")))
ignore_list = [
os.path.join(tempfile.gettempdir(), "**"),
os.path.join(get_python_lib(), "**"),
os.path.join("coalib", "tests", "**"),
os.path.join("bears", "tests", "**")
]
exit(testhelper.execute_python3_files(ignore_list))
|
#!/usr/bin/env python3
import os
import tempfile
from distutils.sysconfig import get_python_lib
from coalib.tests.TestHelper import TestHelper
if __name__ == '__main__':
parser = TestHelper.create_argparser(description="Runs coalas tests.")
parser.add_argument("-b",
"--ignore-bear-tests",
help="ignore bear tests",
action="store_true")
parser.add_argument("-m",
"--ignore-main-tests",
help="ignore main program tests",
action="store_true")
testhelper = TestHelper(parser)
if not testhelper.args.ignore_main_tests:
testhelper.add_test_files(os.path.abspath(os.path.join("coalib",
"tests")))
if not testhelper.args.ignore_bear_tests:
testhelper.add_test_files(os.path.abspath(os.path.join("bears",
"tests")))
ignore_list = [
os.path.join(tempfile.gettempdir(), "**"),
os.path.join(os.path.dirname(get_python_lib()), "**"),
os.path.join("coalib", "tests", "**"),
os.path.join("bears", "tests", "**")
]
exit(testhelper.execute_python3_files(ignore_list))
|
Update run_tests.py to fix coverage
|
tests: Update run_tests.py to fix coverage
|
Python
|
agpl-3.0
|
Asalle/coala,ManjiriBirajdar/coala,jayvdb/coala,Asnelchristian/coala,RJ722/coala,abhiroyg/coala,FeodorFitsner/coala,meetmangukiya/coala,sils1297/coala,Tanmay28/coala,yashLadha/coala,Asalle/coala,scottbelden/coala,stevemontana1980/coala,sophiavanvalkenburg/coala,Tanmay28/coala,JohnS-01/coala,Nosferatul/coala,yashLadha/coala,SambitAcharya/coala,arjunsinghy96/coala,d6e/coala,sagark123/coala,Tanmay28/coala,mr-karan/coala,SanketDG/coala,CruiseDevice/coala,sagark123/coala,refeed/coala,NalinG/coala,SambitAcharya/coala,AbdealiJK/coala,sudheesh001/coala,kartikeys98/coala,coala-analyzer/coala,karansingh1559/coala,NalinG/coala,CruiseDevice/coala,Shade5/coala,RJ722/coala,lonewolf07/coala,rresol/coala,NiklasMM/coala,impmihai/coala,SanketDG/coala,dagdaggo/coala,Tanmay28/coala,incorrectusername/coala,coala-analyzer/coala,CruiseDevice/coala,Shade5/coala,vinc456/coala,yashtrivedi96/coala,NiklasMM/coala,coala-analyzer/coala,nemaniarjun/coala,JohnS-01/coala,abhiroyg/coala,coala/coala,Tanmay28/coala,swatilodha/coala,Shade5/coala,SambitAcharya/coala,dagdaggo/coala,incorrectusername/coala,mr-karan/coala,nemaniarjun/coala,stevemontana1980/coala,Asnelchristian/coala,MattAllmendinger/coala,yashtrivedi96/coala,coala/coala,MattAllmendinger/coala,shreyans800755/coala,dagdaggo/coala,damngamerz/coala,meetmangukiya/coala,SambitAcharya/coala,arush0311/coala,Balaji2198/coala,ManjiriBirajdar/coala,rimacone/testing2,andreimacavei/coala,sils1297/coala,rresol/coala,AbdealiJK/coala,AdeshAtole/coala,netman92/coala,Nosferatul/coala,Uran198/coala,arjunsinghy96/coala,yland/coala,karansingh1559/coala,SambitAcharya/coala,arush0311/coala,vinc456/coala,NalinG/coala,shreyans800755/coala,NalinG/coala,tushar-rishav/coala,incorrectusername/coala,arafsheikh/coala,andreimacavei/coala,Tanmay28/coala,d6e/coala,tltuan/coala,sophiavanvalkenburg/coala,svsn2117/coala,arush0311/coala,lonewolf07/coala,meetmangukiya/coala,aptrishu/coala,MattAllmendinger/coala,swatilodha/coala,SanketDG/coala,RJ722/coala,damngamerz/coala,aptrishu/coala,kartikeys98/coala,scriptnull/coala,rresol/coala,sils1297/coala,ayushin78/coala,tltuan/coala,ayushin78/coala,swatilodha/coala,saurabhiiit/coala,impmihai/coala,svsn2117/coala,FeodorFitsner/coala,shreyans800755/coala,NiklasMM/coala,d6e/coala,arjunsinghy96/coala,coala/coala,Asalle/coala,scriptnull/coala,abhiroyg/coala,FeodorFitsner/coala,AdeshAtole/coala,Uran198/coala,tushar-rishav/coala,tushar-rishav/coala,yland/coala,karansingh1559/coala,vinc456/coala,aptrishu/coala,MariosPanag/coala,impmihai/coala,Balaji2198/coala,netman92/coala,mr-karan/coala,djkonro/coala,netman92/coala,nemaniarjun/coala,damngamerz/coala,scriptnull/coala,NalinG/coala,kartikeys98/coala,MariosPanag/coala,Asnelchristian/coala,scriptnull/coala,refeed/coala,Balaji2198/coala,djkonro/coala,saurabhiiit/coala,arafsheikh/coala,scriptnull/coala,scottbelden/coala,sudheesh001/coala,Tanmay28/coala,Uran198/coala,sophiavanvalkenburg/coala,MariosPanag/coala,refeed/coala,jayvdb/coala,jayvdb/coala,svsn2117/coala,rimacone/testing2,stevemontana1980/coala,sudheesh001/coala,yland/coala,scriptnull/coala,ManjiriBirajdar/coala,AbdealiJK/coala,Tanmay28/coala,andreimacavei/coala,NalinG/coala,saurabhiiit/coala,AdeshAtole/coala,ayushin78/coala,lonewolf07/coala,scriptnull/coala,tltuan/coala,rimacone/testing2,yashLadha/coala,SambitAcharya/coala,scottbelden/coala,sagark123/coala,JohnS-01/coala,NalinG/coala,arafsheikh/coala,SambitAcharya/coala,Nosferatul/coala,djkonro/coala,yashtrivedi96/coala
|
---
+++
@@ -29,7 +29,7 @@
ignore_list = [
os.path.join(tempfile.gettempdir(), "**"),
- os.path.join(get_python_lib(), "**"),
+ os.path.join(os.path.dirname(get_python_lib()), "**"),
os.path.join("coalib", "tests", "**"),
os.path.join("bears", "tests", "**")
]
|
6de9457215e5a41a40acaf428132f46ab94fed2c
|
miniraf/combine.py
|
miniraf/combine.py
|
import astropy.io.fits as fits
import numpy as np
import sys
from .util import stack_fits_data
METHOD_MAP = {"median": lambda x: np.median(x, axis=0, overwrite_input=True),
"average": lambda x: np.average(x, axis=0),
"sum": lambda x: np.sum(x, axis=0)}
def create_parser(subparsers):
parser_combine = subparsers.add_parser("combine", help="combine help")
parser_combine.add_argument("-m", "--method", choices=["median", "average", "sum"], required=True)
parser_combine.add_argument("-o", "--output", metavar="OUTFILE", default=sys.stdout.buffer)
parser_combine.add_argument("file", nargs="+")
parser_combine.set_defaults(func=main)
def main(args):
image_stack = stack_fits_data(args.file)
result = METHOD_MAP[args.method](image_stack)
hdu = fits.PrimaryHDU(result)
hdu.writeto(args.output)
|
import astropy.io.fits as fits
import numpy as np
import sys
from .util import stack_fits_data
METHOD_MAP = {"median": lambda x: np.median(x, axis=0, overwrite_input=True),
"average": lambda x: np.mean(x, axis=0),
"sum": lambda x: np.sum(x, axis=0)}
def create_parser(subparsers):
parser_combine = subparsers.add_parser("combine", help="combine help")
parser_combine.add_argument("-m", "--method", choices=["median", "average", "sum"], required=True)
parser_combine.add_argument("-o", "--output", metavar="OUTFILE", default=sys.stdout.buffer)
parser_combine.add_argument("file", nargs="+")
parser_combine.set_defaults(func=main)
def main(args):
image_stack = stack_fits_data(args.file)
result = METHOD_MAP[args.method](image_stack)
hdu = fits.PrimaryHDU(result)
hdu.writeto(args.output)
|
Use np.mean instead for unweighted mean
|
Use np.mean instead for unweighted mean
Signed-off-by: Lizhou Sha <d6acb26e253550574bc1141efa0eb5e6de15daeb@mit.edu>
|
Python
|
mit
|
vulpicastor/miniraf
|
---
+++
@@ -5,7 +5,7 @@
from .util import stack_fits_data
METHOD_MAP = {"median": lambda x: np.median(x, axis=0, overwrite_input=True),
- "average": lambda x: np.average(x, axis=0),
+ "average": lambda x: np.mean(x, axis=0),
"sum": lambda x: np.sum(x, axis=0)}
def create_parser(subparsers):
|
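With no weights argument, np.average(x, axis=0) and np.mean(x, axis=0) produce the same unweighted mean; np.mean simply skips the weight-handling machinery. A quick check:

import numpy as np

stack = np.arange(24, dtype=float).reshape(4, 2, 3)  # four fake 2x3 "images"
assert np.allclose(np.average(stack, axis=0), np.mean(stack, axis=0))
print(np.mean(stack, axis=0))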
81f2a561ac27d13fb43edae1fb94b237951ff9cc
|
tests/rietveld/test_braggtree.py
|
tests/rietveld/test_braggtree.py
|
from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld.braggtree import BraggTree, BankRegexException
class BraggTreeTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
def tearDown(self):
self.main_window.quit()
def test_get_bank_id(self):
"""Test we can extract a bank id from bank workspace name"""
braggtree = BraggTree(None)
target = 12345
bank_wksp_name = "Bank {} - 90.0".format(target)
bank_id = braggtree._get_bank_id(bank_wksp_name)
self.assertEqual(int(bank_id), target)
def test_get_bank_id_exception(self):
"""Test for raised exception from a bad workspace name"""
braggtree = BraggTree(None)
bad_ws = "Bank jkl 1 -- 90.0"
self.assertRaises(BankRegexException, braggtree._get_bank_id, bad_ws)
def test_do_plot_ws_exception(self):
"""Test for raised exception from MainWindow==None"""
braggtree = BraggTree(None)
self.assertRaises(NotImplementedError, braggtree.do_plot_ws)
if __name__ == '__main__':
unittest.main() # pragma: no cover
|
from __future__ import absolute_import, print_function
import pytest
from addie.main import MainWindow
from addie.rietveld.braggtree import BraggTree, BankRegexException
@pytest.fixture
def braggtree():
return BraggTree(None)
def test_get_bank_id(qtbot, braggtree):
"""Test we can extract a bank id from bank workspace name"""
target = 12345
bank_wksp_name = "Bank {} - 90.0".format(target)
bank_id = braggtree._get_bank_id(bank_wksp_name)
assert int(bank_id) == target
def test_get_bank_id_exception(qtbot, braggtree):
"""Test for raised exception from a bad workspace name"""
bad_ws = "Bank jkl 1 -- 90.0"
with pytest.raises(BankRegexException) as e:
braggtree._get_bank_id(bad_ws)
def test_do_plot_ws_exception(qtbot, braggtree):
"""Test for raised exception from MainWindow==None"""
with pytest.raises(NotImplementedError) as e:
braggtree.do_plot_ws()
|
Refactor BraggTree test to use pytest-qt
|
Refactor BraggTree test to use pytest-qt
|
Python
|
mit
|
neutrons/FastGR,neutrons/FastGR,neutrons/FastGR
|
---
+++
@@ -1,35 +1,27 @@
from __future__ import absolute_import, print_function
-import unittest
-from qtpy.QtWidgets import QApplication
+import pytest
+from addie.main import MainWindow
from addie.rietveld.braggtree import BraggTree, BankRegexException
-
-class BraggTreeTests(unittest.TestCase):
- def setUp(self):
- self.main_window = QApplication([])
-
- def tearDown(self):
- self.main_window.quit()
-
- def test_get_bank_id(self):
- """Test we can extract a bank id from bank workspace name"""
- braggtree = BraggTree(None)
- target = 12345
- bank_wksp_name = "Bank {} - 90.0".format(target)
- bank_id = braggtree._get_bank_id(bank_wksp_name)
- self.assertEqual(int(bank_id), target)
-
- def test_get_bank_id_exception(self):
- """Test for raised exception from a bad workspace name"""
- braggtree = BraggTree(None)
- bad_ws = "Bank jkl 1 -- 90.0"
- self.assertRaises(BankRegexException, braggtree._get_bank_id, bad_ws)
-
- def test_do_plot_ws_exception(self):
- """Test for raised exception from MainWindow==None"""
- braggtree = BraggTree(None)
- self.assertRaises(NotImplementedError, braggtree.do_plot_ws)
+@pytest.fixture
+def braggtree():
+ return BraggTree(None)
-if __name__ == '__main__':
- unittest.main() # pragma: no cover
+def test_get_bank_id(qtbot, braggtree):
+ """Test we can extract a bank id from bank workspace name"""
+ target = 12345
+ bank_wksp_name = "Bank {} - 90.0".format(target)
+ bank_id = braggtree._get_bank_id(bank_wksp_name)
+ assert int(bank_id) == target
+
+def test_get_bank_id_exception(qtbot, braggtree):
+ """Test for raised exception from a bad workspace name"""
+ bad_ws = "Bank jkl 1 -- 90.0"
+ with pytest.raises(BankRegexException) as e:
+ braggtree._get_bank_id(bad_ws)
+
+def test_do_plot_ws_exception(qtbot, braggtree):
+ """Test for raised exception from MainWindow==None"""
+ with pytest.raises(NotImplementedError) as e:
+ braggtree.do_plot_ws()
|
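The refactor replaces the unittest-style setUp/tearDown of a QApplication with pytest-qt's qtbot fixture, which creates and manages the QApplication for the test session, while a small braggtree fixture builds the widget under test; note the rewritten file also imports MainWindow without using it. A reduced example of the same fixture shape, with a hypothetical stand-in widget rather than the real BraggTree:

import pytest

class FakeTree(object):
    """Stand-in for the Qt widget; the real tests build BraggTree(None)."""
    def bank_id(self, name):
        return int(name.split()[1])

@pytest.fixture
def tree():
    return FakeTree()

def test_bank_id(qtbot, tree):
    # qtbot comes from pytest-qt and guarantees a QApplication exists.
    assert tree.bank_id("Bank 12345 - 90.0") == 12345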
5daa628d59576f00d0c5d49358a800dd728c6fdf
|
necropsy/models.py
|
necropsy/models.py
|
# -*- coding: utf-8 -*-
from django.db import models
# Create your models here.
class Necropsy (models.Model):
clinical_information = models.TextField(null=True, blank=True)
macroscopic = models.TextField(null=True, blank=True)
microscopic = models.TextField(null=True, blank=True)
conclusion = models.TextField(null=True, blank=True)
notes = models.TextField(null=True, blank=True)
footer = models.TextField(null=True, blank=True)
|
# -*- coding: utf-8 -*-
from django.db import models
from modeling.exam import Exam
from modeling.report import ReportStatus
class NecropsyStatus(models.Model):
description = models.CharField(max_length=50)
class Necropsy(models.Model):
clinical_information = models.TextField(null=True, blank=True)
main_disease = models.TextField(null=True, blank=True)
consequential_final_disease = models.TextField(null=True, blank=True)
contributors_disease = models.TextField(null=True, blank=True)
consequential_disease = models.TextField(null=True, blank=True)
other_disases = models.TextField(null=True, blank=True)
note = models.TextField(null=True, blank=True)
footer = models.TextField(null=True, blank=True)
status = models.ForeignKey(NecropsyStatus)
exam = models.ForeignKey(Exam)
class NecropsyReport(models.Model):
clinical_information = models.TextField(null=True, blank=True)
main_disease = models.TextField(null=True, blank=True)
consequential_final_disease = models.TextField(null=True, blank=True)
contributors_disease = models.TextField(null=True, blank=True)
consequential_disease = models.TextField(null=True, blank=True)
other_disases = models.TextField(null=True, blank=True)
note = models.TextField(null=True, blank=True)
footer = models.TextField(null=True, blank=True)
status = models.ForeignKey(ReportStatus)
necropsy = models.ForeignKey(Necropsy)
|
Add NecropsyReport in Model Necropsy
|
Add NecropsyReport in Model Necropsy
|
Python
|
mit
|
msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub
|
---
+++
@@ -1,12 +1,33 @@
# -*- coding: utf-8 -*-
+
from django.db import models
+from modeling.exam import Exam
+from modeling.report import ReportStatus
-# Create your models here.
-class Necropsy (models.Model):
- clinical_information = models.TextField(null=True, blank=True)
- macroscopic = models.TextField(null=True, blank=True)
- microscopic = models.TextField(null=True, blank=True)
- conclusion = models.TextField(null=True, blank=True)
- notes = models.TextField(null=True, blank=True)
- footer = models.TextField(null=True, blank=True)
+class NecropsyStatus(models.Model):
+ description = models.CharField(max_length=50)
+
+class Necropsy(models.Model):
+ clinical_information = models.TextField(null=True, blank=True)
+ main_disease = models.TextField(null=True, blank=True)
+ consequential_final_disease = models.TextField(null=True, blank=True)
+ contributors_disease = models.TextField(null=True, blank=True)
+ consequential_disease = models.TextField(null=True, blank=True)
+ other_disases = models.TextField(null=True, blank=True)
+ note = models.TextField(null=True, blank=True)
+ footer = models.TextField(null=True, blank=True)
+ status = models.ForeignKey(NecropsyStatus)
+ exam = models.ForeignKey(Exam)
+
+class NecropsyReport(models.Model):
+ clinical_information = models.TextField(null=True, blank=True)
+ main_disease = models.TextField(null=True, blank=True)
+ consequential_final_disease = models.TextField(null=True, blank=True)
+ contributors_disease = models.TextField(null=True, blank=True)
+ consequential_disease = models.TextField(null=True, blank=True)
+ other_disases = models.TextField(null=True, blank=True)
+ note = models.TextField(null=True, blank=True)
+ footer = models.TextField(null=True, blank=True)
+ status = models.ForeignKey(ReportStatus)
+ necropsy = models.ForeignKey(Necropsy)
|
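One caveat about the pattern in the record above: from Django 2.0 onwards, ForeignKey requires an explicit on_delete argument, so the bare models.ForeignKey(...) calls would no longer be accepted. A hedged sketch of how the same relationship could be declared on a modern Django version (field names simply mirror the record and are otherwise illustrative):

from django.db import models

class NecropsyStatus(models.Model):
    description = models.CharField(max_length=50)

class Necropsy(models.Model):
    clinical_information = models.TextField(null=True, blank=True)
    # on_delete is mandatory in Django 2.0+; PROTECT blocks deleting a status still in use
    status = models.ForeignKey(NecropsyStatus, on_delete=models.PROTECT)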
2ba4e0758c04bebcd1dcde78e99605d0b9460abf
|
foldatlas/monitor.py
|
foldatlas/monitor.py
|
import os
# must call "sudo apt-get install sendmail" first...
# if sts != 0:
# print("Sendmail exit status "+str(sts))
def send_error(recipient, error_details):
SENDMAIL = "/usr/sbin/sendmail" # sendmail location
p = os.popen("%s -t" % SENDMAIL, "w")
p.write("To: "+recipient+"\n")
p.write("Subject: FoldAtlas error\n")
p.write("\n") # blank line separating headers from body
p.write("Some text\n")
p.write("some more text\n")
sts = p.close()
|
import traceback
import os
import urllib.request # the lib that handles the url stuff
test_url = "http://www.foldatlas.com/transcript/AT2G45180.1"
recipient = "matthew.gs.norris@gmail.com"
search_str = "AT2G45180.1"
def run_test():
try:
data = urllib.request.urlopen(test_url) # it's a file like object and works just like a file
text = str(data.read())
if search_str in text:
print("It worked!")
else:
send_error(text)
except:
send_error(traceback.format_exc())
def send_error(error_details):
print("FAILED")
SENDMAIL = "/usr/sbin/sendmail" # sendmail location
p = os.popen("%s -t" % SENDMAIL, "w")
p.write("To: "+recipient+"\n")
p.write("Subject: FoldAtlas error\n")
p.write("\n") # blank line separating headers from body
p.write(error_details)
sts = p.close()
run_test()
|
Monitor now checks and emails
|
Monitor now checks and emails
|
Python
|
mit
|
mnori/foldatlas,mnori/foldatlas,mnori/foldatlas,mnori/foldatlas
|
---
+++
@@ -1,20 +1,32 @@
+import traceback
import os
+import urllib.request # the lib that handles the url stuff
-# must call "sudo apt-get install sendmail" first...
+test_url = "http://www.foldatlas.com/transcript/AT2G45180.1"
+recipient = "matthew.gs.norris@gmail.com"
+search_str = "AT2G45180.1"
+def run_test():
+ try:
+ data = urllib.request.urlopen(test_url) # it's a file like object and works just like a file
+ text = str(data.read())
-# if sts != 0:
-# print("Sendmail exit status "+str(sts))
+ if search_str in text:
+ print("It worked!")
+ else:
+ send_error(text)
+ except:
+ send_error(traceback.format_exc())
-
-
-def send_error(recipient, error_details):
+def send_error(error_details):
+ print("FAILED")
SENDMAIL = "/usr/sbin/sendmail" # sendmail location
p = os.popen("%s -t" % SENDMAIL, "w")
p.write("To: "+recipient+"\n")
p.write("Subject: FoldAtlas error\n")
p.write("\n") # blank line separating headers from body
- p.write("Some text\n")
- p.write("some more text\n")
+ p.write(error_details)
sts = p.close()
+
+run_test()
|
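The monitor in the record above pipes the message into sendmail via os.popen and never inspects the exit status (the commented-out status check in the original hints at that gap). A small sketch of the same check-and-notify flow that does verify delivery, using only the standard library; the URL, recipient and sendmail path come from the record, everything else is an assumption:

import subprocess
import traceback
import urllib.request

TEST_URL = "http://www.foldatlas.com/transcript/AT2G45180.1"
RECIPIENT = "matthew.gs.norris@gmail.com"
SENDMAIL = "/usr/sbin/sendmail"

def send_error(details):
    message = "To: {}\nSubject: FoldAtlas error\n\n{}\n".format(RECIPIENT, details)
    # subprocess.run exposes sendmail's exit status, unlike a fire-and-forget popen write
    result = subprocess.run([SENDMAIL, "-t"], input=message.encode(), timeout=30)
    if result.returncode != 0:
        print("Sendmail exit status", result.returncode)

def run_test():
    try:
        body = urllib.request.urlopen(TEST_URL).read().decode("utf-8", "replace")
        if "AT2G45180.1" not in body:
            send_error(body[:1000])
    except Exception:
        send_error(traceback.format_exc())

if __name__ == "__main__":
    run_test()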
6b0774eab70c42fbdd28869b6bcdab9b81183b8e
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/env python
# tests require pytest-cov and pytest-xdist
import os
import signal
import sys
from bluesky.testing.noseclasses import KnownFailure
import pytest
try:
from pcaspy import Driver, SimpleServer
from multiprocessing import Process
def to_subproc():
prefix = 'BSTEST:'
pvdb = {
'VAL': {
'prec': 3,
},
}
class myDriver(Driver):
def __init__(self):
super(myDriver, self).__init__()
if __name__ == '__main__':
server = SimpleServer()
server.createPV(prefix, pvdb)
driver = myDriver()
# process CA transactions
while True:
try:
server.process(0.1)
except KeyboardInterrupt:
break
p = Process(target=to_subproc)
except ImportError:
p = None
def run():
if p is not None:
p.start()
try:
args = ['--cov bluesky']
args.extend(sys.argv)
pytest.main(args)
finally:
if p is not None:
os.kill(p.pid, signal.SIGINT)
p.join()
if __name__ == '__main__':
run()
|
#!/usr/bin/env python
# tests require pytest-cov and pytest-xdist
import os
import signal
import sys
import pytest
try:
from pcaspy import Driver, SimpleServer
from multiprocessing import Process
def to_subproc():
prefix = 'BSTEST:'
pvdb = {
'VAL': {
'prec': 3,
},
}
class myDriver(Driver):
def __init__(self):
super(myDriver, self).__init__()
if __name__ == '__main__':
server = SimpleServer()
server.createPV(prefix, pvdb)
driver = myDriver()
# process CA transactions
while True:
try:
server.process(0.1)
except KeyboardInterrupt:
break
p = Process(target=to_subproc)
except ImportError:
p = None
def run():
if p is not None:
p.start()
try:
# adding rxs to show extra info on skips and xfails
args = ['--cov bluesky -srx -vv']
args.extend(sys.argv)
pytest.main(args)
finally:
if p is not None:
os.kill(p.pid, signal.SIGINT)
p.join()
if __name__ == '__main__':
run()
|
Remove deleted subpackage. Add better args to pytest
|
TST: Remove deleted subpackage. Add better args to pytest
|
Python
|
bsd-3-clause
|
ericdill/bluesky,ericdill/bluesky
|
---
+++
@@ -4,7 +4,6 @@
import os
import signal
import sys
-from bluesky.testing.noseclasses import KnownFailure
import pytest
try:
@@ -45,7 +44,8 @@
if p is not None:
p.start()
try:
- args = ['--cov bluesky']
+ # adding rxs to show extra info on skips and xfails
+ args = ['--cov bluesky -srx -vv']
args.extend(sys.argv)
pytest.main(args)
finally:
|
c3a184a188d18f87bad2d7f34a2dfd3a7cca4827
|
signac/common/errors.py
|
signac/common/errors.py
|
# Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
def __str__(self):
if len(self.args) > 0:
return "Failed to authenticate with host '{}'.".format(
self.args[0])
else:
return "Failed to authenticate with host."
class ExportError(Error, RuntimeError):
pass
class FileNotFoundError(Error, FileNotFoundError):
pass
class FetchError(FileNotFoundError):
pass
|
# Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
from . import six
class Error(Exception):
pass
class ConfigError(Error, RuntimeError):
pass
class AuthenticationError(Error, RuntimeError):
def __str__(self):
if len(self.args) > 0:
return "Failed to authenticate with host '{}'.".format(
self.args[0])
else:
return "Failed to authenticate with host."
class ExportError(Error, RuntimeError):
pass
if six.PY2:
class FileNotFoundError(Error, IOError):
pass
else:
class FileNotFoundError(Error, FileNotFoundError):
pass
class FetchError(FileNotFoundError):
pass
|
Fix py27 issue in error module.
|
Fix py27 issue in error module.
Inherit signac internal FileNotFoundError class from IOError
instead of FileNotFoundError in python 2.7.
|
Python
|
bsd-3-clause
|
csadorf/signac,csadorf/signac
|
---
+++
@@ -1,7 +1,7 @@
# Copyright (c) 2016 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
-
+from . import six
class Error(Exception):
pass
@@ -25,8 +25,12 @@
pass
-class FileNotFoundError(Error, FileNotFoundError):
- pass
+if six.PY2:
+ class FileNotFoundError(Error, IOError):
+ pass
+else:
+ class FileNotFoundError(Error, FileNotFoundError):
+ pass
class FetchError(FileNotFoundError):
|
54e78b61db2660a57762b0f0115d532b308386e4
|
opal/tests/test_core_commandline.py
|
opal/tests/test_core_commandline.py
|
"""
Unittests for opal.core.commandline
"""
from opal.core.test import OpalTestCase
from opal.core import commandline
|
"""
Unittests for opal.core.commandline
"""
from mock import patch, MagicMock
from opal.core.test import OpalTestCase
from opal.core import commandline
class StartprojectTestCase(OpalTestCase):
def test_startproject(self):
mock_args = MagicMock(name='Mock Args')
mock_args.name = 'projectname'
with patch.object(commandline.scaffold_utils, 'start_project') as sp:
commandline.startproject(mock_args)
sp.assert_called_with('projectname', commandline.USERLAND_HERE)
class StartpluginTestCase(OpalTestCase):
def test_startplugin(self):
mock_args = MagicMock(name='Mock Args')
mock_args.name = 'pluginname'
with patch.object(commandline.scaffold_utils, 'start_plugin') as sp:
commandline.startplugin(mock_args)
sp.assert_called_with('pluginname', commandline.USERLAND_HERE)
|
Add simple basic unittests for some of our commandline argparse target functions
|
Add simple basic unittests for some of our commandline argparse target functions
|
Python
|
agpl-3.0
|
khchine5/opal,khchine5/opal,khchine5/opal
|
---
+++
@@ -1,6 +1,28 @@
"""
Unittests for opal.core.commandline
"""
+from mock import patch, MagicMock
+
from opal.core.test import OpalTestCase
from opal.core import commandline
+
+
+class StartprojectTestCase(OpalTestCase):
+
+ def test_startproject(self):
+ mock_args = MagicMock(name='Mock Args')
+ mock_args.name = 'projectname'
+ with patch.object(commandline.scaffold_utils, 'start_project') as sp:
+ commandline.startproject(mock_args)
+ sp.assert_called_with('projectname', commandline.USERLAND_HERE)
+
+
+class StartpluginTestCase(OpalTestCase):
+
+ def test_startplugin(self):
+ mock_args = MagicMock(name='Mock Args')
+ mock_args.name = 'pluginname'
+ with patch.object(commandline.scaffold_utils, 'start_plugin') as sp:
+ commandline.startplugin(mock_args)
+ sp.assert_called_with('pluginname', commandline.USERLAND_HERE)
|
c00a55b8337dbc354921c195dfa4becc7ee1346a
|
ipython/profile_default/startup/00-imports.py
|
ipython/profile_default/startup/00-imports.py
|
"""Imports for IPython"""
# pylint: disable=W0611
# import this
import os
import re
import sys
import inspect
pyprint = print
mores = []
try:
from rich.console import Console
console = Console(color_system="standard")
print = console.print
mores += ["rich"]
except ImportError:
pass
try:
from importlib import reload
except ImportError:
def reload(x):
raise NotImplementedError("importlib.reload is not available")
try:
import requests
mores += ["requests"]
except ModuleNotFoundError:
pass
try:
import pysyte
from pysyte.types import paths
from pysyte.types.paths import path
from pysyte import cli
except ImportError as e:
print(e)
sys.stderr.write("pip install pysyte # please")
try:
from pathlib import Path
mores += ["Path"]
except ImportError:
pass
more = ", ".join([" "] + mores) if mores else ""
executable = sys.executable.replace(os.environ['HOME'], '~')
version = sys.version.split()[0]
stdout = lambda x: sys.stdout.write(f"{x}\n")
stdout(f"import os, re, sys, inspect, pysyte, paths, path, cli{more}")
stdout("")
stdout(f"{executable} {version}")
|
"""Imports for IPython"""
# pylint: disable=W0611
# import this
import os
import re
import sys
import inspect
pyprint = print
mores = []
try:
from rich.console import Console
console = Console(color_system="standard")
print = console.print
mores += ["rich"]
from rich import pretty
pretty.install()
except ImportError:
pass
try:
from importlib import reload
except ImportError:
def reload(x):
raise NotImplementedError("importlib.reload is not available")
try:
import requests
mores += ["requests"]
except ModuleNotFoundError:
pass
try:
import pysyte
from pysyte.types import paths
from pysyte.types.paths import path
from pysyte import cli
except ImportError as e:
print(e)
sys.stderr.write("pip install pysyte # please")
try:
from pathlib import Path
mores += ["Path"]
except ImportError:
pass
more = ", ".join([" "] + mores) if mores else ""
executable = sys.executable.replace(os.environ['HOME'], '~')
version = sys.version.split()[0]
stdout = lambda x: sys.stdout.write(f"{x}\n")
stdout(f"import os, re, sys, inspect, pysyte, paths, path, cli{more}")
stdout("")
stdout(f"{executable} {version}")
|
Use rich for printing in ipython
|
Use rich for printing in ipython
|
Python
|
mit
|
jalanb/jab,jalanb/dotjab,jalanb/dotjab,jalanb/jab
|
---
+++
@@ -17,6 +17,8 @@
console = Console(color_system="standard")
print = console.print
mores += ["rich"]
+ from rich import pretty
+ pretty.install()
except ImportError:
pass
|
80ca0bebce22f64d0d01377493126ed95d8a64cb
|
falcom/luhn.py
|
falcom/luhn.py
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
def get_check_digit_from_checkable_int (number):
return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def rotate_digit (digit):
if digit > 4:
return (digit * 2) - 9
else:
return digit * 2
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
else:
return None
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
def rotate_digit (digit):
if digit > 4:
return (digit * 2) - 9
else:
return digit * 2
def get_check_digit_from_checkable_int (number):
return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
else:
return None
|
Reorder methods to make sense
|
Reorder methods to make sense
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
---
+++
@@ -1,9 +1,6 @@
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
-
-def get_check_digit_from_checkable_int (number):
- return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
def rotate_digit (digit):
if digit > 4:
@@ -12,6 +9,9 @@
else:
return digit * 2
+def get_check_digit_from_checkable_int (number):
+ return (9 * ((number // 10) + rotate_digit(number % 10))) % 10
+
def get_check_digit (number = None):
if number:
return get_check_digit_from_checkable_int(int(number))
|
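The two helpers in the record above implement the doubling step and check-digit formula of the Luhn algorithm. For context, here is a short self-contained sketch of the full algorithm over an arbitrary-length payload, plus a validator and a worked example; the function names here are illustrative and are not the module's API:

def double_and_fold(digit):
    # Luhn doubling step: double, and subtract 9 when the result has two digits
    return digit * 2 - 9 if digit > 4 else digit * 2

def luhn_check_digit(payload):
    total = 0
    for position, char in enumerate(reversed(str(payload))):
        d = int(char)
        total += double_and_fold(d) if position % 2 == 0 else d
    # (9 * s) % 10 is equivalent to (10 - s % 10) % 10
    return (9 * total) % 10

def luhn_is_valid(full_number):
    # a full number is valid when its last digit matches the check digit
    # computed over everything before it
    digits = str(full_number)
    return int(digits[-1]) == luhn_check_digit(digits[:-1])

# worked example: payload 7992739871 has check digit 3, so 79927398713 validates
assert luhn_check_digit(7992739871) == 3
assert luhn_is_valid(79927398713)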
d5ee1185f0249d2e29f78866eb29552921b69ec9
|
config.py
|
config.py
|
import os
import jinja2
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
@staticmethod
def init_app(app):
repo_root = os.path.abspath(os.path.dirname(__file__))
template_folders = [
os.path.join(repo_root,
'bower_components/govuk_template/views/layouts'),
os.path.join(repo_root, 'app/templates')
]
jinja_loader = jinja2.FileSystemLoader(template_folders)
app.jinja_loader = jinja_loader
class Test(Config):
DEBUG = True
class Development(Config):
DEBUG = True,
BASE_TEMPLATE_DATA = {
'asset_path': '/static/',
'header_class': 'with-proposition'
}
class Live(Config):
DEBUG = False
config = {
'live': Live,
'development': Development,
'test': Test,
'default': Development
}
|
import os
import jinja2
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
STATIC_URL_PATH = '/supplier/static'
ASSET_PATH = STATIC_URL_PATH + '/'
BASE_TEMPLATE_DATA = {
'asset_path': ASSET_PATH,
'header_class': 'with-proposition'
}
@staticmethod
def init_app(app):
repo_root = os.path.abspath(os.path.dirname(__file__))
template_folders = [
os.path.join(repo_root,
'bower_components/govuk_template/views/layouts'),
os.path.join(repo_root, 'app/templates')
]
jinja_loader = jinja2.FileSystemLoader(template_folders)
app.jinja_loader = jinja_loader
class Test(Config):
DEBUG = True
class Development(Config):
DEBUG = True,
class Live(Config):
DEBUG = False
config = {
'live': Live,
'development': Development,
'test': Test,
'default': Development
}
|
Add supplier/ prefix to static file paths
|
Add supplier/ prefix to static file paths
|
Python
|
mit
|
mtekel/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend
|
---
+++
@@ -4,7 +4,15 @@
basedir = os.path.abspath(os.path.dirname(__file__))
-class Config:
+class Config(object):
+ DEBUG = False
+ STATIC_URL_PATH = '/supplier/static'
+ ASSET_PATH = STATIC_URL_PATH + '/'
+ BASE_TEMPLATE_DATA = {
+ 'asset_path': ASSET_PATH,
+ 'header_class': 'with-proposition'
+ }
+
@staticmethod
def init_app(app):
repo_root = os.path.abspath(os.path.dirname(__file__))
@@ -23,10 +31,6 @@
class Development(Config):
DEBUG = True,
- BASE_TEMPLATE_DATA = {
- 'asset_path': '/static/',
- 'header_class': 'with-proposition'
- }
class Live(Config):
|
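For the static-path change above to take effect, the Flask application has to be constructed with that prefix. A plausible sketch of the wiring; create_app and the blueprint-free setup here are assumptions for illustration, not the project's actual factory:

from flask import Flask
from config import config

def create_app(config_name="default"):
    cfg = config[config_name]
    # Flask accepts the static URL prefix at construction time
    app = Flask(__name__, static_url_path=cfg.STATIC_URL_PATH)
    app.config.from_object(cfg)
    cfg.init_app(app)
    return app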
286dced2c23b90dba53848423d6f29873779d177
|
config.py
|
config.py
|
import os
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY')
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL')
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
@classmethod
def init_app(cls, app):
pass
class HerokuConfig(ProductionConfig):
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# # log to stderr
# import logging
# from logging import StreamHandler
# file_handler = StreamHandler()
# file_handler.setLevel(logging.WARNING)
# app.logger.addHandler(file_handler)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'heroku': HerokuConfig,
'default': DevelopmentConfig
}
|
import os
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY')
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DEV_DATABASE_URL')
class TestingConfig(Config):
TESTING = True
if os.environ.get('TEST_DATABASE_URL'):
SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL')
else:
basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
@classmethod
def init_app(cls, app):
pass
class HerokuConfig(ProductionConfig):
@classmethod
def init_app(cls, app):
ProductionConfig.init_app(app)
# # log to stderr
# import logging
# from logging import StreamHandler
# file_handler = StreamHandler()
# file_handler.setLevel(logging.WARNING)
# app.logger.addHandler(file_handler)
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'heroku': HerokuConfig,
'default': DevelopmentConfig
}
|
Use sqlite as DB for test if none set in environment
|
Use sqlite as DB for test if none set in environment
|
Python
|
mit
|
boltzj/movies-in-sf
|
---
+++
@@ -17,7 +17,12 @@
class TestingConfig(Config):
TESTING = True
- SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL')
+
+ if os.environ.get('TEST_DATABASE_URL'):
+ SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL')
+ else:
+ basedir = os.path.abspath(os.path.dirname(__file__))
+ SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
class ProductionConfig(Config):
|
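The fallback above lets the test suite run with no database configured at all. A minimal usage sketch of how such a config mapping is typically selected at runtime; the APP_CONFIG variable name is an assumption, not something defined in this repository:

import os
from config import config

# pick the config class from an environment variable, falling back to the default
config_name = os.environ.get("APP_CONFIG", "default")
cfg = config[config_name]

if config_name == "testing":
    # with TEST_DATABASE_URL unset this points at data-test.sqlite next to config.py
    print(cfg.SQLALCHEMY_DATABASE_URI)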
a6e46fc5429840fd3ff47c03d8b0d9f3b28c7811
|
src/sentry/api/endpoints/group_events_latest.py
|
src/sentry/api/endpoints/group_events_latest.py
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api import client
from sentry.api.base import DocSection
from sentry.api.bases.group import GroupEndpoint
class GroupEventsLatestEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
def get(self, request, group):
"""
Retrieve the latest sample for an aggregate
Return details on the latest sample for this aggregate.
{method} {path}
"""
event = group.get_latest_event()
try:
return client.get('/events/{}/'.format(event.id), request.user, request.auth)
except client.ApiError as e:
return Response(e.body, status=e.status_code)
|
from __future__ import absolute_import
from rest_framework.response import Response
from sentry.api import client
from sentry.api.base import DocSection
from sentry.api.bases.group import GroupEndpoint
class GroupEventsLatestEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
def get(self, request, group):
"""
Retrieve the latest sample for an aggregate
Return details on the latest sample for this aggregate.
{method} {path}
"""
event = group.get_latest_event()
if not event:
return Response({'detail': 'No events found for group'}, status=404)
try:
return client.get('/events/{}/'.format(event.id), request.user, request.auth)
except client.ApiError as e:
return Response(e.body, status=e.status_code)
|
Handle no latest event (fixes GH-1727)
|
Handle no latest event (fixes GH-1727)
|
Python
|
bsd-3-clause
|
imankulov/sentry,hongliang5623/sentry,fotinakis/sentry,BuildingLink/sentry,gencer/sentry,mitsuhiko/sentry,BuildingLink/sentry,beeftornado/sentry,mvaled/sentry,daevaorn/sentry,wong2/sentry,ifduyue/sentry,jean/sentry,ifduyue/sentry,Kryz/sentry,jean/sentry,kevinlondon/sentry,fotinakis/sentry,pauloschilling/sentry,korealerts1/sentry,1tush/sentry,alexm92/sentry,pauloschilling/sentry,ngonzalvez/sentry,JamesMura/sentry,zenefits/sentry,fuziontech/sentry,1tush/sentry,gencer/sentry,gencer/sentry,JamesMura/sentry,jean/sentry,BuildingLink/sentry,mvaled/sentry,mvaled/sentry,korealerts1/sentry,gencer/sentry,felixbuenemann/sentry,daevaorn/sentry,wong2/sentry,ifduyue/sentry,wong2/sentry,nicholasserra/sentry,imankulov/sentry,fuziontech/sentry,ngonzalvez/sentry,beeftornado/sentry,felixbuenemann/sentry,Kryz/sentry,zenefits/sentry,looker/sentry,fotinakis/sentry,kevinlondon/sentry,felixbuenemann/sentry,fuziontech/sentry,pauloschilling/sentry,Natim/sentry,JackDanger/sentry,mitsuhiko/sentry,mvaled/sentry,looker/sentry,Natim/sentry,songyi199111/sentry,korealerts1/sentry,BayanGroup/sentry,zenefits/sentry,BuildingLink/sentry,daevaorn/sentry,hongliang5623/sentry,JamesMura/sentry,ifduyue/sentry,imankulov/sentry,looker/sentry,beeftornado/sentry,nicholasserra/sentry,alexm92/sentry,songyi199111/sentry,JackDanger/sentry,hongliang5623/sentry,JamesMura/sentry,looker/sentry,nicholasserra/sentry,JamesMura/sentry,BayanGroup/sentry,zenefits/sentry,ngonzalvez/sentry,zenefits/sentry,daevaorn/sentry,mvaled/sentry,jean/sentry,kevinlondon/sentry,Kryz/sentry,gencer/sentry,jean/sentry,mvaled/sentry,songyi199111/sentry,fotinakis/sentry,BuildingLink/sentry,looker/sentry,ifduyue/sentry,JackDanger/sentry,BayanGroup/sentry,1tush/sentry,Natim/sentry,alexm92/sentry
|
---
+++
@@ -20,6 +20,8 @@
"""
event = group.get_latest_event()
+ if not event:
+ return Response({'detail': 'No events found for group'}, status=404)
try:
return client.get('/events/{}/'.format(event.id), request.user, request.auth)
|
666fc19e2949a30cbe40bf6020c141e84dfcae1e
|
app/soc/models/project_survey.py
|
app/soc/models/project_survey.py
|
#!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the ProjectSurvey model.
"""
__authors__ = [
'"Daniel Diniz" <ajaksu@gmail.com>',
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
from soc.models.survey import Survey
class ProjectSurvey(Survey):
"""Survey for Students that have a StudentProject.
"""
def __init__(self, *args, **kwargs):
super(ProjectSurvey, self).__init__(*args, **kwargs)
# TODO: prefix has to be set to gsoc_program once data has been transferred
self.prefix = 'program'
self.taking_access = 'student'
|
#!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the ProjectSurvey model.
"""
__authors__ = [
'"Daniel Diniz" <ajaksu@gmail.com>',
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
from soc.models.survey import Survey
class ProjectSurvey(Survey):
"""Survey for Students that have a StudentProject.
"""
def __init__(self, *args, **kwargs):
super(ProjectSurvey, self).__init__(*args, **kwargs)
self.prefix = 'gsoc_program'
self.taking_access = 'student'
|
Set the default prefix for ProjectSurveys to gsoc_program.
|
Set the default prefix for ProjectSurveys to gsoc_program.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
---
+++
@@ -32,6 +32,5 @@
def __init__(self, *args, **kwargs):
super(ProjectSurvey, self).__init__(*args, **kwargs)
- # TODO: prefix has to be set to gsoc_program once data has been transferred
- self.prefix = 'program'
+ self.prefix = 'gsoc_program'
self.taking_access = 'student'
|
1b9aa9909b284489c9f8a5d38b1c5520d5916dc7
|
feature_extraction/measurements/__init__.py
|
feature_extraction/measurements/__init__.py
|
from collections import defaultdict
from feature_extraction.util import DefaultAttributeDict
class Measurement(object):
"""
A generic feature measurement.
Attributes
----------
default_options
Can be set by subclasses to set default option values
"""
default_options = {}
def __init__(self, options=None):
"""
When initializing this measurement, options can be passed.
These are exposed to internal algorithms as `self.options`.
Parameters
----------
options : dict
A dict of options for this measurement.
"""
self.options = DefaultAttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
from .pixelaverage import PixelAverage
from .texture_haralick import HaralickTexture
|
from collections import defaultdict
from feature_extraction.util import AttributeDict
class Measurement(object):
"""
A generic feature measurement.
Attributes
----------
default_options
Can be set by subclasses to set default option values
"""
default_options = {}
def __init__(self, options=None):
"""
When initializing this measurement, options can be passed.
These are exposed to internal algorithms as `self.options`.
Parameters
----------
options : dict
A dict of options for this measurement.
"""
self.options = AttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
from .pixelaverage import PixelAverage
from .texture_haralick import HaralickTexture
|
Switch back to AttributeDict for measurement options
|
Switch back to AttributeDict for measurement options
|
Python
|
apache-2.0
|
widoptimization-willett/feature-extraction
|
---
+++
@@ -1,5 +1,5 @@
from collections import defaultdict
-from feature_extraction.util import DefaultAttributeDict
+from feature_extraction.util import AttributeDict
class Measurement(object):
"""
@@ -23,7 +23,7 @@
options : dict
A dict of options for this measurement.
"""
- self.options = DefaultAttributeDict()
+ self.options = AttributeDict()
self.options.update(self.default_options or {})
self.options.update(options or {})
|
f0bca27d58fb4bc74b6627275486dbfd159954d6
|
tests/test_datafeed_fms_teams.py
|
tests/test_datafeed_fms_teams.py
|
import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "UTC / Ensign Bickford Aerospace & Defense & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
|
import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "ClearEdge Power / United Technologies / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
|
Update test case for 2014
|
Update test case for 2014
|
Python
|
mit
|
tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance
|
---
+++
@@ -28,7 +28,7 @@
for team in teams:
if team.team_number == 177:
found_177 = True
- self.assertEqual(team.name, "UTC / Ensign Bickford Aerospace & Defense & South Windsor High School")
+ self.assertEqual(team.name, "ClearEdge Power / United Technologies / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
|
c43e120319248a804328893aad34fc774c4928d3
|
stdup/kde.py
|
stdup/kde.py
|
# -*- coding: utf-8 -*-
# Copyright 2013 Jacek Mitręga
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
import envoy
import logging
logger = logging.getLogger(__name__)
class KdeWindowManager(object):
def __init__(self, workspace=6):
self.prev_workspace = 1
self.workspace = 6
def show(self):
envoy.run('qdbus org.kde.kwin /KWin org.kde.KWin.setCurrentDesktop 6',
timeout=2)
# envoy.run('killall firefox', timeout=2)
# envoy.connect('firefox http://standup-backend.herokuapp.com/?room=12')
def hide(self):
envoy.run(('qdbus org.kde.kwin /KWin '
'org.kde.KWin.setCurrentDesktop {}')
.format(self.prev_workspace),
timeout=2)
# envoy.run('killall firefox', timeout=2)
|
# -*- coding: utf-8 -*-
# Copyright 2013 Jacek Mitręga
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
import envoy
import logging
logger = logging.getLogger(__name__)
class KdeWindowManager(object):
def __init__(self, workspace=6):
self.prev_workspace = 1
self.workspace = 6
def show(self):
logger.info('kde show')
envoy.run('qdbus org.kde.kwin /KWin org.kde.KWin.setCurrentDesktop 6',
timeout=2)
# envoy.run('killall firefox', timeout=2)
# envoy.connect('firefox http://standup-backend.herokuapp.com/?room=12')
def hide(self):
logger.info('kde hide')
envoy.run(('qdbus org.kde.kwin /KWin '
'org.kde.KWin.setCurrentDesktop {}')
.format(self.prev_workspace),
timeout=2)
# envoy.run('killall firefox', timeout=2)
|
Add KDE show & hide logging
|
Add KDE show & hide logging
|
Python
|
apache-2.0
|
waawal/standup-desktop,waawal/standup-desktop
|
---
+++
@@ -30,12 +30,14 @@
self.workspace = 6
def show(self):
+ logger.info('kde show')
envoy.run('qdbus org.kde.kwin /KWin org.kde.KWin.setCurrentDesktop 6',
timeout=2)
# envoy.run('killall firefox', timeout=2)
# envoy.connect('firefox http://standup-backend.herokuapp.com/?room=12')
def hide(self):
+ logger.info('kde hide')
envoy.run(('qdbus org.kde.kwin /KWin '
'org.kde.KWin.setCurrentDesktop {}')
.format(self.prev_workspace),
|