commit: string, length 40
| old_file: string, length 4-150
| new_file: string, length 4-150
| old_contents: string, length 0-3.26k
| new_contents: string, length 1-4.43k
| subject: string, length 15-501
| message: string, length 15-4.06k
| lang: 4 classes
| license: 13 classes
| repos: string, length 5-91.5k
| diff: string, length 0-4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
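Each row that follows carries one commit under this schema: the commit SHA, the file path before and after the change, the full old and new file contents, the commit subject and message, the language, the license, the repositories containing the commit, and a unified diff. As a minimal sketch of how records with this schema might be consumed, assuming the dump corresponds to a Hugging Face dataset (the dataset path below is a hypothetical placeholder, not taken from this document):

```python
# Minimal sketch: iterate a few records of a commit/diff dataset with the schema above.
# "user/commits-with-diffs" is a placeholder path, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("user/commits-with-diffs", split="train")

for row in ds.select(range(3)):
    # Fields per the schema: commit, old_file, new_file, old_contents,
    # new_contents, subject, message, lang, license, repos, diff.
    print(row["commit"], row["lang"], row["license"], row["new_file"])
    print(row["subject"])
    print(row["diff"])
```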
dbe622d2297d62f61adf34e17de7c84d0cffbeaf
|
project/project/local_settings_example.py
|
project/project/local_settings_example.py
|
DEBUG = True
ADMINS = (
('Zaphod Beeblebrox', 'hoopyfrood@heartofgold.com'),
)
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^lkajsdlfkjaoif09ijoi23092309i02[93ip2j3[r29u3[0923jorij'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'db_name_here',
'USER': 'db_user_here',
'PASSWORD': 'db_password_here',
'HOST': 'localhost',
'PORT': '3306',
}
}
GOOGLE_ACCOUNT_CODE = "UA-XXXXXXX-XX"
|
DEBUG = True
ADMINS = (
('Zaphod Beeblebrox', 'hoopyfrood@heartofgold.com'),
)
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^lkajsdlfkjaoif09ijoi23092309i02[93ip2j3[r29u3[0923jorij'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'db_name_here',
'USER': 'db_user_here',
'PASSWORD': 'db_password_here',
'HOST': 'localhost',
'PORT': '3306',
}
}
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
DEFAULT_FROM_EMAIL = 'My Site Admin <me@myproject.com>'
GOOGLE_ACCOUNT_CODE = "UA-XXXXXXX-XX"
|
Add email settings example to localsettings
|
Add email settings example to localsettings
|
Python
|
mit
|
colbypalmer/cp-project-template,colbypalmer/cp-project-template,colbypalmer/cp-project-template
|
---
+++
@@ -18,4 +18,10 @@
}
}
+EMAIL_HOST = ''
+EMAIL_HOST_USER = ''
+EMAIL_HOST_PASSWORD = ''
+EMAIL_PORT = 587
+DEFAULT_FROM_EMAIL = 'My Site Admin <me@myproject.com>'
+
GOOGLE_ACCOUNT_CODE = "UA-XXXXXXX-XX"
|
b6f3c619e8c3fa375ac9b66e7ce555c77f02f152
|
pytest_raisesregexp/plugin.py
|
pytest_raisesregexp/plugin.py
|
import re
import py.code
import pytest
def pytest_namespace():
return {'raises_regexp': raises_regexp}
class raises_regexp(object):
def __init__(self, expected_exception, regexp):
self.exception = expected_exception
self.regexp = regexp
self.excinfo = None
def __enter__(self):
self.excinfo = object.__new__(py.code.ExceptionInfo)
return self.excinfo
def __exit__(self, exc_type, exc_val, exc_tb):
__tracebackhide__ = True
if exc_type is None:
pytest.fail('DID NOT RAISE %s' % self.exception)
self.excinfo.__init__((exc_type, exc_val, exc_tb))
if not issubclass(exc_type, self.exception):
pytest.fail('%s RAISED instead of %s' % (exc_type, self.exception))
if not re.search(self.regexp, str(exc_val)):
pytest.fail('pattern "%s" not found in "%s"' % (self.regexp, str(exc_val)))
return True
|
import re
import py.code
import pytest
def pytest_namespace():
return {'raises_regexp': raises_regexp}
class raises_regexp(object):
def __init__(self, expected_exception, regexp):
self.exception = expected_exception
self.regexp = regexp
self.excinfo = None
def __enter__(self):
self.excinfo = object.__new__(py.code.ExceptionInfo)
return self.excinfo
def __exit__(self, exc_type, exc_val, exc_tb):
__tracebackhide__ = True
if exc_type is None:
pytest.fail('DID NOT RAISE %s' % self.exception)
self.excinfo.__init__((exc_type, exc_val, exc_tb))
if not issubclass(exc_type, self.exception):
pytest.fail('%s RAISED instead of %s\n%s' % (exc_type, self.exception, repr(exc_val)))
if not re.search(self.regexp, str(exc_val)):
pytest.fail('pattern "%s" not found in "%s"' % (self.regexp, str(exc_val)))
return True
|
Add originally raised exception value to pytest error message
|
Add originally raised exception value to pytest error message
|
Python
|
mit
|
kissgyorgy/pytest-raisesregexp
|
---
+++
@@ -25,7 +25,7 @@
self.excinfo.__init__((exc_type, exc_val, exc_tb))
if not issubclass(exc_type, self.exception):
- pytest.fail('%s RAISED instead of %s' % (exc_type, self.exception))
+ pytest.fail('%s RAISED instead of %s\n%s' % (exc_type, self.exception, repr(exc_val)))
if not re.search(self.regexp, str(exc_val)):
pytest.fail('pattern "%s" not found in "%s"' % (self.regexp, str(exc_val)))
|
69cf5602ba9dd9d7e0a89c169682ac72e2e18a67
|
everywhere/base.py
|
everywhere/base.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fib(number: int) -> int:
'''
>>> fib(10)
55
'''
if number < 2:
return number
else:
return fib(number-1) + fib(number-2)
def hello() -> None:
'''
>>> hello()
'Hello World'
'''
return 'Hello World'
def add42(number: int) -> int:
'''
>>> add42(100)
142
'''
return number + 42
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fib(number: int) -> int:
'''
>>> fib(10)
55
'''
if number < 2:
return number
else:
return fib(number-1) + fib(number-2)
def hello() -> str:
'''
>>> hello()
'Hello World'
'''
return 'Hello World'
def add42(number: int) -> int:
'''
>>> add42(100)
142
'''
return number + 42
|
Fix return type of hello
|
Fix return type of hello
|
Python
|
bsd-2-clause
|
wdv4758h/python-everywhere,wdv4758h/python-everywhere,wdv4758h/python-everywhere
|
---
+++
@@ -13,7 +13,7 @@
return fib(number-1) + fib(number-2)
-def hello() -> None:
+def hello() -> str:
'''
>>> hello()
'Hello World'
|
5b8da0d318d7b37b3f1a3d868980507b15aa4213
|
salt/renderers/json.py
|
salt/renderers/json.py
|
from __future__ import absolute_import
import json
def render(json_data, env='', sls='', **kws):
if not isinstance(json_data, basestring):
json_data = json_data.read()
if json_data.startswith('#!'):
json_data = json_data[json_data.find('\n')+1:]
return json.loads(json_data)
|
from __future__ import absolute_import
import json
def render(json_data, env='', sls='', **kws):
if not isinstance(json_data, basestring):
json_data = json_data.read()
if json_data.startswith('#!'):
json_data = json_data[json_data.find('\n')+1:]
if not json_data.strip():
return {}
return json.loads(json_data)
|
Add missed changes for the previous commit.
|
Add missed changes for the previous commit.
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -7,6 +7,7 @@
if json_data.startswith('#!'):
json_data = json_data[json_data.find('\n')+1:]
-
+ if not json_data.strip():
+ return {}
return json.loads(json_data)
|
3ad64c06f917efdaa94ad5debc23941f4d95105a
|
fuzzy_happiness/attributes.py
|
fuzzy_happiness/attributes.py
|
#!/usr/bin/python
#
# Copyright 2013 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Work out what fields to anonymize. To run a simple example, do this:
# cd <nova checkout dir>
# . .tox/py27/bin/activate
# <path to fuzzy happiness>/attributes.py
import inspect
from nova.db.sqlalchemy import models
def load_configuration():
configs = {}
for name, obj in inspect.getmembers(models):
if not inspect.isclass(obj):
continue
if not issubclass(obj, models.NovaBase):
continue
if not hasattr(obj, '__confidential__'):
continue
configs[name] = obj.__confidential__
return configs
if __name__ == '__main__':
print load_configuration()
|
#!/usr/bin/python
#
# Copyright 2013 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Work out what fields to anonymize. To run a simple example, do this:
# cd <nova checkout dir>
# . .tox/py27/bin/activate
# <path to fuzzy happiness>/attributes.py
import inspect
from nova.db.sqlalchemy import models
def load_configuration():
configs = {}
for name, obj in inspect.getmembers(models):
if not inspect.isclass(obj):
continue
if not issubclass(obj, models.NovaBase):
continue
attrs_missing = []
for required_attr in ['__tablename__', '__confidential__']:
if not hasattr(obj, required_attr):
attrs_missing.append(required_attr)
if attrs_missing:
print ('Required attributes %s missing from %s'
%(', '.join(attrs_missing), name))
continue
configs[obj.__tablename__] = obj.__confidential__
return configs
if __name__ == '__main__':
print load_configuration()
|
Return table name not model object name.
|
Return table name not model object name.
|
Python
|
apache-2.0
|
rcbau/fuzzy-happiness
|
---
+++
@@ -36,10 +36,17 @@
if not issubclass(obj, models.NovaBase):
continue
- if not hasattr(obj, '__confidential__'):
+ attrs_missing = []
+ for required_attr in ['__tablename__', '__confidential__']:
+ if not hasattr(obj, required_attr):
+ attrs_missing.append(required_attr)
+
+ if attrs_missing:
+ print ('Required attributes %s missing from %s'
+ %(', '.join(attrs_missing), name))
continue
- configs[name] = obj.__confidential__
+ configs[obj.__tablename__] = obj.__confidential__
return configs
|
dbbd6e1e87964db6b2279a661a63751da31213e5
|
millipede.py
|
millipede.py
|
#!/usr/bin/env python3
class millipede:
def __init__(self, size, comment=None):
self._millipede = ""
if comment:
self._millipede = comment + "\n\n"
self._millipede += " ββ ββ \n"
padding = 2
direction = -1
while (size):
for i in range(0, padding):
self._millipede += " "
self._millipede += "ββ(βββ)ββ\n"
padding += direction
if padding == 0:
direction = 1
elif padding == 4:
padding = 3
direction = -1
size -= 1
def __str__(self):
return self._millipede
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Millipede generator')
parser.add_argument('size', metavar='s', type=int, help='the size of the millipede')
parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?")
args = parser.parse_args()
print(millipede(args.size, args.comment))
|
#!/usr/bin/env python3
class millipede:
def __init__(self, size, comment=None, reverse=False):
self._padding_offsets = [2, 1, 0, 1, 2, 3, 4, 4, 3]
head = " ββ ββ\n" if reverse else " ββ ββ\n"
body = "".join([
"{}{}\n".format(
" " * self._padding_offsets[(x + 3) % 9 if reverse else x % 9],
"ββ(βββ)ββ" if reverse else "ββ(βββ)ββ"
)
for x in range(size)
])
self._millipede = ""
if reverse:
self._millipede += body + head
if comment:
self._millipede += "\n" + comment
else:
if comment:
self._millipede += comment + "\n\n"
self._millipede += head + body
def __str__(self):
return self._millipede
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Millipede generator')
parser.add_argument('size', metavar='s', type=int, help='the size of the millipede')
parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?")
parser.add_argument('-r', '--reverse', action='store_true', help='reverse the millipede')
args = parser.parse_args()
print(millipede(args.size, comment=args.comment, reverse=args.reverse))
|
Rewrite body generation and add reverse option
|
Rewrite body generation and add reverse option
|
Python
|
bsd-3-clause
|
evadot/millipede-python,getmillipede/millipede-python,moul/millipede-python,EasonYi/millipede-python,EasonYi/millipede-python,evadot/millipede-python,moul/millipede-python,getmillipede/millipede-python
|
---
+++
@@ -2,26 +2,28 @@
class millipede:
- def __init__(self, size, comment=None):
+
+ def __init__(self, size, comment=None, reverse=False):
+ self._padding_offsets = [2, 1, 0, 1, 2, 3, 4, 4, 3]
+
+ head = " ββ ββ\n" if reverse else " ββ ββ\n"
+ body = "".join([
+ "{}{}\n".format(
+ " " * self._padding_offsets[(x + 3) % 9 if reverse else x % 9],
+ "ββ(βββ)ββ" if reverse else "ββ(βββ)ββ"
+ )
+ for x in range(size)
+ ])
+
self._millipede = ""
- if comment:
- self._millipede = comment + "\n\n"
-
- self._millipede += " ββ ββ \n"
- padding = 2
- direction = -1
- while (size):
- for i in range(0, padding):
- self._millipede += " "
- self._millipede += "ββ(βββ)ββ\n"
- padding += direction
-
- if padding == 0:
- direction = 1
- elif padding == 4:
- padding = 3
- direction = -1
- size -= 1
+ if reverse:
+ self._millipede += body + head
+ if comment:
+ self._millipede += "\n" + comment
+ else:
+ if comment:
+ self._millipede += comment + "\n\n"
+ self._millipede += head + body
def __str__(self):
return self._millipede
@@ -32,6 +34,7 @@
parser = argparse.ArgumentParser(description='Millipede generator')
parser.add_argument('size', metavar='s', type=int, help='the size of the millipede')
parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?")
+ parser.add_argument('-r', '--reverse', action='store_true', help='reverse the millipede')
args = parser.parse_args()
- print(millipede(args.size, args.comment))
+ print(millipede(args.size, comment=args.comment, reverse=args.reverse))
|
8233abab6084db39df064b87d256fd0caffecb89
|
simpy/test/test_simulation.py
|
simpy/test/test_simulation.py
|
from simpy import Simulation, InterruptedException
def test_simple_process():
def pem(ctx, result):
while True:
result.append(ctx.now)
yield ctx.wait(1)
result = []
Simulation(pem, result).simulate(until=4)
assert result == [0, 1, 2, 3]
def test_interrupt():
def pem(ctx):
try:
yield ctx.wait(10)
raise RuntimeError('Expected an interrupt')
except InterruptedException:
pass
def root(ctx):
process = ctx.fork(pem)
yield ctx.wait(5)
process.interrupt()
Simulation(root).simulate(until=20)
def test_wait_for_process():
def pem(ctx):
yield ctx.wait(10)
def root(ctx):
yield ctx.wait(ctx.fork(pem))
assert ctx.now == 10
Simulation(root).simulate(until=20)
def test_process_result():
def pem(ctx):
yield ctx.wait(10)
ctx.exit('oh noes, i am dead x_x')
def root(ctx):
result = yield ctx.wait(ctx.fork(pem))
assert result == 'oh noes, i am dead x_x'
Simulation(root).simulate(until=20)
|
from simpy import Simulation, InterruptedException
def test_simple_process():
def pem(ctx, result):
while True:
result.append(ctx.now)
yield ctx.wait(1)
result = []
Simulation(pem, result).simulate(until=4)
assert result == [0, 1, 2, 3]
def test_interrupt():
def root(ctx):
def pem(ctx):
try:
yield ctx.wait(10)
raise RuntimeError('Expected an interrupt')
except InterruptedException:
pass
process = ctx.fork(pem)
yield ctx.wait(5)
process.interrupt()
Simulation(root).simulate(until=20)
def test_wait_for_process():
def root(ctx):
def pem(ctx):
yield ctx.wait(10)
yield ctx.wait(ctx.fork(pem))
assert ctx.now == 10
Simulation(root).simulate(until=20)
def test_process_result():
def root(ctx):
def pem(ctx):
yield ctx.wait(10)
ctx.exit('oh noes, i am dead x_x')
result = yield ctx.wait(ctx.fork(pem))
assert result == 'oh noes, i am dead x_x'
Simulation(root).simulate(until=20)
|
Define subprocesses in the context of the root process. Maybe this is more readable?
|
Define subprocesses in the context of the root process. Maybe this is more readable?
|
Python
|
mit
|
Uzere/uSim
|
---
+++
@@ -12,14 +12,14 @@
assert result == [0, 1, 2, 3]
def test_interrupt():
- def pem(ctx):
- try:
- yield ctx.wait(10)
- raise RuntimeError('Expected an interrupt')
- except InterruptedException:
- pass
+ def root(ctx):
+ def pem(ctx):
+ try:
+ yield ctx.wait(10)
+ raise RuntimeError('Expected an interrupt')
+ except InterruptedException:
+ pass
- def root(ctx):
process = ctx.fork(pem)
yield ctx.wait(5)
process.interrupt()
@@ -27,21 +27,21 @@
Simulation(root).simulate(until=20)
def test_wait_for_process():
- def pem(ctx):
- yield ctx.wait(10)
+ def root(ctx):
+ def pem(ctx):
+ yield ctx.wait(10)
- def root(ctx):
yield ctx.wait(ctx.fork(pem))
assert ctx.now == 10
Simulation(root).simulate(until=20)
def test_process_result():
- def pem(ctx):
- yield ctx.wait(10)
- ctx.exit('oh noes, i am dead x_x')
+ def root(ctx):
+ def pem(ctx):
+ yield ctx.wait(10)
+ ctx.exit('oh noes, i am dead x_x')
- def root(ctx):
result = yield ctx.wait(ctx.fork(pem))
assert result == 'oh noes, i am dead x_x'
|
a51f5e108f6fb81fce5d99b53888f2a2954fb9a6
|
server_app/__main__.py
|
server_app/__main__.py
|
import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG))
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
|
import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log")), level=logging.DEBUG)
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
|
Make logger sort by date
|
Make logger sort by date
|
Python
|
bsd-3-clause
|
jos0003/Chat,jos0003/Chat,jos0003/Chat,jos0003/Chat,jos0003/Chat
|
---
+++
@@ -5,7 +5,7 @@
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
-logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG))
+logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log")), level=logging.DEBUG)
sys.stderr.close()
sys.stdout.close()
|
1b0edd2eeb722397e9c6c7da04ab6cbd3865a476
|
reddit_adzerk/adzerkads.py
|
reddit_adzerk/adzerkads.py
|
from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
from r2.models.subreddit import DefaultSR
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
site_name = getattr(c.site, "analytics_name", c.site.name)
# adzerk reporting is easier when not using a space in the tag
if isinstance(c.site, DefaultSR):
site_name = "-reddit.com"
self.ad_url = g.config[url_key].format(
subreddit=quote(site_name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
|
from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
from r2.models.subreddit import DefaultSR
FRONTPAGE_NAME = "-reddit.com"
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
site_name = getattr(c.site, "analytics_name", c.site.name)
# adzerk reporting is easier when not using a space in the tag
if isinstance(c.site, DefaultSR):
site_name = FRONTPAGE_NAME
self.ad_url = g.config[url_key].format(
subreddit=quote(site_name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
|
Move special frontpage name to variable.
|
Move special frontpage name to variable.
|
Python
|
bsd-3-clause
|
madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk
|
---
+++
@@ -5,6 +5,8 @@
from r2.lib.pages import Ads as BaseAds
from r2.models.subreddit import DefaultSR
+
+FRONTPAGE_NAME = "-reddit.com"
class Ads(BaseAds):
def __init__(self):
@@ -15,7 +17,7 @@
# adzerk reporting is easier when not using a space in the tag
if isinstance(c.site, DefaultSR):
- site_name = "-reddit.com"
+ site_name = FRONTPAGE_NAME
self.ad_url = g.config[url_key].format(
subreddit=quote(site_name.lower()),
|
2cb03ff8c3d21f36b95103eaf9ae0fb3e43077bd
|
pinax_theme_bootstrap/templatetags/pinax_theme_bootstrap_tags.py
|
pinax_theme_bootstrap/templatetags/pinax_theme_bootstrap_tags.py
|
from django import template
from django.contrib.messages.utils import get_level_tags
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = LEVEL_TAGS[message.level]
if level_tag == u"error":
level_tag = u"danger"
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
tags = [alert_level_tag]
if message.extra_tags:
tags.append(message.extra_tags)
return u" ".join(tags)
|
from django import template
from django.contrib.messages.utils import get_level_tags
from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
register = template.Library()
@register.simple_tag()
def get_message_tags(message):
"""
Returns the message's level_tag prefixed with Bootstrap's "alert-" prefix
along with any tags included in message.extra_tags
Messages in Django >= 1.7 have a message.level_tag attr
"""
level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
tags = [alert_level_tag]
extra_tags = force_text(message.extra_tags, strings_only=True)
if extra_tags:
tags.append(extra_tags)
return u" ".join(tags)
|
Allow for lazy translation of message tags
|
Allow for lazy translation of message tags
|
Python
|
mit
|
foraliving/foraliving,druss16/danslist,grahamu/pinax-theme-bootstrap,foraliving/foraliving,grahamu/pinax-theme-bootstrap,druss16/danslist,jacobwegner/pinax-theme-bootstrap,jacobwegner/pinax-theme-bootstrap,druss16/danslist,grahamu/pinax-theme-bootstrap,jacobwegner/pinax-theme-bootstrap,foraliving/foraliving
|
---
+++
@@ -1,5 +1,6 @@
from django import template
from django.contrib.messages.utils import get_level_tags
+from django.utils.encoding import force_text
LEVEL_TAGS = get_level_tags()
@@ -15,14 +16,15 @@
Messages in Django >= 1.7 have a message.level_tag attr
"""
- level_tag = LEVEL_TAGS[message.level]
+ level_tag = force_text(LEVEL_TAGS.get(message.level, ''), strings_only=True)
if level_tag == u"error":
level_tag = u"danger"
alert_level_tag = u"alert-{tag}".format(tag=level_tag)
tags = [alert_level_tag]
- if message.extra_tags:
- tags.append(message.extra_tags)
+ extra_tags = force_text(message.extra_tags, strings_only=True)
+ if extra_tags:
+ tags.append(extra_tags)
return u" ".join(tags)
|
d20f9d5e56c4f430f6ca7b4ab03a279e34bbbd45
|
importer/tasks.py
|
importer/tasks.py
|
from .models import FileImport
from .importers import ImportFailure
from django.db import transaction
import celery
assuming_failure_message = '{0} did not return True. Assuming failure.'
processing_status = 'processing'
processing_description = 'Processing the data in {filename}.'
success_status = 'success'
success_description = 'The import appears to have completed successfully.'
# The description for failures is the contents of the exception message.
failure_status = 'failure'
@celery.task
@transaction.atomic
def importer_asynchronous_task(import_pk, *args, **kwargs):
logger = importer_asynchronous_task.get_logger()
import_instance = FileImport.objects.get(pk=import_pk)
ImportType = import_instance.get_related_importer(**kwargs)
if ImportType is None:
import_instance.status = 30
return False
importer = ImportType()
import_instance.status = processing_status
import_instance.status_description = 'Currently processing file'
import_instance.save()
import_context = import_instance.get_context()
try:
if importer.process(import_context, logger) is True:
import_instance.status = success_status
import_instance.status_description = success_description
import_instance.save()
else:
raise ImportFailure(assuming_failure_message.format(
importer.__class__.__name__
))
except ImportFailure, e:
import_instance.status = failure_status
import_instance.status_description = e.message
import_instance.save()
return True
|
from .models import FileImport
from .importers import ImportFailure
from django.db import transaction
import celery
assuming_failure_message = '{0} did not return True. Assuming failure.'
processing_status = 'processing'
processing_description = 'Processing the data in {filename}.'
success_status = 'success'
success_description = 'The import appears to have completed successfully.'
# The description for failures is the contents of the exception message.
failure_status = 'failure'
@celery.shared_task
@transaction.atomic
def importer_asynchronous_task(import_pk, *args, **kwargs):
logger = importer_asynchronous_task.get_logger()
import_instance = FileImport.objects.get(pk=import_pk)
ImportType = import_instance.get_related_importer(**kwargs)
if ImportType is None:
import_instance.status = 30
return False
importer = ImportType()
import_instance.status = processing_status
import_instance.status_description = 'Currently processing file'
import_instance.save()
import_context = import_instance.get_context()
try:
if importer.process(import_context, logger) is True:
import_instance.status = success_status
import_instance.status_description = success_description
import_instance.save()
else:
raise ImportFailure(assuming_failure_message.format(
importer.__class__.__name__
))
except ImportFailure, e:
import_instance.status = failure_status
import_instance.status_description = e.message
import_instance.save()
return True
|
Use shared_task instead of task.
|
Use shared_task instead of task.
|
Python
|
mit
|
monokrome/django-drift
|
---
+++
@@ -19,7 +19,7 @@
failure_status = 'failure'
-@celery.task
+@celery.shared_task
@transaction.atomic
def importer_asynchronous_task(import_pk, *args, **kwargs):
logger = importer_asynchronous_task.get_logger()
|
3973e0d2591b2554e96da0a22b2d723a71d2423e
|
imgaug/augmenters/__init__.py
|
imgaug/augmenters/__init__.py
|
from __future__ import absolute_import
from imgaug.augmenters.arithmetic import *
from imgaug.augmenters.blur import *
from imgaug.augmenters.color import *
from imgaug.augmenters.contrast import GammaContrast, SigmoidContrast, LogContrast, LinearContrast
from imgaug.augmenters.convolutional import *
from imgaug.augmenters.flip import *
from imgaug.augmenters.geometric import *
from imgaug.augmenters.meta import *
from imgaug.augmenters.overlay import *
from imgaug.augmenters.segmentation import *
from imgaug.augmenters.size import *
|
from __future__ import absolute_import
from imgaug.augmenters.arithmetic import *
from imgaug.augmenters.blur import *
from imgaug.augmenters.color import *
from imgaug.augmenters.contrast import *
from imgaug.augmenters.convolutional import *
from imgaug.augmenters.flip import *
from imgaug.augmenters.geometric import *
from imgaug.augmenters.meta import *
from imgaug.augmenters.overlay import *
from imgaug.augmenters.segmentation import *
from imgaug.augmenters.size import *
|
Switch import from contrast to all
|
Switch import from contrast to all
Change import from contrast.py in
augmenters/__init__.py to * instead of
selective, as * should not import private
methods anyways.
|
Python
|
mit
|
aleju/ImageAugmenter,aleju/imgaug,aleju/imgaug
|
---
+++
@@ -2,7 +2,7 @@
from imgaug.augmenters.arithmetic import *
from imgaug.augmenters.blur import *
from imgaug.augmenters.color import *
-from imgaug.augmenters.contrast import GammaContrast, SigmoidContrast, LogContrast, LinearContrast
+from imgaug.augmenters.contrast import *
from imgaug.augmenters.convolutional import *
from imgaug.augmenters.flip import *
from imgaug.augmenters.geometric import *
|
ad558a5acc93e1e5206ed27b2dc679089b277890
|
me_api/app.py
|
me_api/app.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
from .middleware import github, keybase, medium
from .cache import cache
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
blueprints = {
'github': github.github_api,
'keybase': keybase.keybase_api,
'medium': medium.medium_api
}
app.register_blueprint(me)
for module in modules.keys():
app.register_blueprint(blueprints[module])
return app
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from flask import Flask
from .middleware.me import me
from .cache import cache
def _register_module(app, module):
if module == 'github':
from .middleware import github
app.register_blueprint(github.github_api)
elif module == 'keybase':
from .middleware import keybase
app.register_blueprint(keybase.keybase_api)
elif module == 'medium':
from .middleware import medium
app.register_blueprint(medium.medium_api)
def create_app(config):
app = Flask(__name__)
app.config.from_object(config)
cache.init_app(app)
modules = config.modules['modules']
app.register_blueprint(me)
for module in modules.keys():
_register_module(app, module)
return app
|
Fix giant bug: crash when don't config all modules
|
Fix giant bug: crash when don't config all modules
that's bacause you import all the modules
> from .middleware import github, keybase, medium
while each module need to get configurations from modules.json, e.g.
> config = Config.modules['modules']['github']
but can't get anything at all, so it will crash.
that's not the correct behaviour, so we just import the desired module
and then register it. The solution isn't very elegant.
|
Python
|
mit
|
lord63/me-api
|
---
+++
@@ -7,8 +7,19 @@
from flask import Flask
from .middleware.me import me
-from .middleware import github, keybase, medium
from .cache import cache
+
+
+def _register_module(app, module):
+ if module == 'github':
+ from .middleware import github
+ app.register_blueprint(github.github_api)
+ elif module == 'keybase':
+ from .middleware import keybase
+ app.register_blueprint(keybase.keybase_api)
+ elif module == 'medium':
+ from .middleware import medium
+ app.register_blueprint(medium.medium_api)
def create_app(config):
@@ -17,14 +28,8 @@
cache.init_app(app)
modules = config.modules['modules']
- blueprints = {
- 'github': github.github_api,
- 'keybase': keybase.keybase_api,
- 'medium': medium.medium_api
- }
-
app.register_blueprint(me)
for module in modules.keys():
- app.register_blueprint(blueprints[module])
+ _register_module(app, module)
return app
|
42b330e5629b25db45e7a0f3f08bdb21e608b106
|
skimage/viewer/qt.py
|
skimage/viewer/qt.py
|
has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QtCore = QtWidgets = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
|
has_qt = True
try:
from matplotlib.backends.qt_compat import QtGui, QtCore, QtWidgets
except ImportError:
try:
from matplotlib.backends.qt4_compat import QtGui, QtCore
QtWidgets = QtGui
except ImportError:
# Mock objects
class QtGui(object):
QMainWindow = object
QDialog = object
QWidget = object
class QtCore_cls(object):
class Qt(object):
TopDockWidgetArea = None
BottomDockWidgetArea = None
LeftDockWidgetArea = None
RightDockWidgetArea = None
def Signal(self, *args, **kwargs):
pass
QWidget = object
QtCore = QtWidgets = QtCore_cls()
has_qt = False
Qt = QtCore.Qt
Signal = QtCore.Signal
|
Add QWidget to the mock Qt
|
Add QWidget to the mock Qt
|
Python
|
bsd-3-clause
|
ClinicalGraphics/scikit-image,bsipocz/scikit-image,rjeli/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,pratapvardhan/scikit-image,juliusbierk/scikit-image,keflavich/scikit-image,warmspringwinds/scikit-image,Britefury/scikit-image,oew1v07/scikit-image,bennlich/scikit-image,ClinicalGraphics/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,ajaybhat/scikit-image,keflavich/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,paalge/scikit-image,Midafi/scikit-image,michaelpacer/scikit-image,michaelaye/scikit-image,GaZ3ll3/scikit-image,chriscrosscutler/scikit-image,michaelpacer/scikit-image,WarrenWeckesser/scikits-image,youprofit/scikit-image,oew1v07/scikit-image,bsipocz/scikit-image,Hiyorimi/scikit-image,newville/scikit-image,youprofit/scikit-image,paalge/scikit-image,robintw/scikit-image,chriscrosscutler/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,Midafi/scikit-image,bennlich/scikit-image,warmspringwinds/scikit-image,robintw/scikit-image,Britefury/scikit-image,ofgulban/scikit-image,GaZ3ll3/scikit-image,dpshelio/scikit-image,emon10005/scikit-image,WarrenWeckesser/scikits-image,blink1073/scikit-image,pratapvardhan/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,rjeli/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,Hiyorimi/scikit-image,jwiggins/scikit-image,paalge/scikit-image,jwiggins/scikit-image,newville/scikit-image
|
---
+++
@@ -23,6 +23,8 @@
def Signal(self, *args, **kwargs):
pass
+ QWidget = object
+
QtCore = QtWidgets = QtCore_cls()
has_qt = False
|
bd3d97cefe61886ab8c2fa24eecd624ca1c6f751
|
profile_collection/startup/90-settings.py
|
profile_collection/startup/90-settings.py
|
import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
def print_scanid(name, doc):
if name == 'start':
print('Scan ID:', doc['scan_id'])
print('Unique ID:', doc['uid'])
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
|
import logging
# metadata set at startup
RE.md['owner'] = 'xf11id'
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
def print_md(name, doc):
if name == 'start':
print('Metadata:\n', repr(doc))
RE.subscribe(print_scanid)
#from eiger_io.fs_handler import LazyEigerHandler
#db.fs.register_handler("AD_EIGER", LazyEigerHandler)
|
Remove redundant Scan ID printing (there is another one elsewhere)
|
Remove redundant Scan ID printing (there is another one elsewhere)
|
Python
|
bsd-2-clause
|
NSLS-II-CHX/ipython_ophyd,NSLS-II-CHX/ipython_ophyd
|
---
+++
@@ -5,13 +5,6 @@
RE.md['beamline_id'] = 'CHX'
# removing 'custom' as it is raising an exception in 0.3.2
# gs.RE.md['custom'] = {}
-
-
-
-def print_scanid(name, doc):
- if name == 'start':
- print('Scan ID:', doc['scan_id'])
- print('Unique ID:', doc['uid'])
def print_md(name, doc):
if name == 'start':
|
ad60b0cd3326c0729237afbb094d22f4415fb422
|
laboratory/experiment.py
|
laboratory/experiment.py
|
import traceback
from laboratory.observation import Observation, Test
from laboratory import exceptions
class Experiment(object):
def __init__(self, name='Experiment', raise_on_mismatch=False):
self.name = name
self.raise_on_mismatch = raise_on_mismatch
self._control = None
self.observations = []
def control(self):
self._control = Observation('Control')
return Test('Control', True, self._control)
def candidate(self, name='Candidate'):
observation = Observation(name)
self.observations.append(observation)
return Test(name, False, observation)
def run(self):
control = self._control
if control is None:
raise exceptions.LaboratoryException(
'Your experiment must record a control case'
)
match = self.compare(control, *self.observations)
return control.value
def compare(self, control, *candidates):
for observation in candidates:
if observation.failure or control.value != observation.value:
return self._comparison_mismatch(control, observation)
return True
def _comparison_mismatch(self, control, observation):
if self.raise_on_mismatch:
if observation.failure:
msg = '%s raised an exception:\n%s' % (
observation.name, traceback.format_exc(observation.exception)
)
else:
msg = '%s does not match control value (%s != %s)' % (
observation.name, control.value, observation.value
)
raise exceptions.MismatchException(msg)
return False
def publish(self):
raise NotImplementedError
|
import traceback
from laboratory.observation import Observation, Test
from laboratory import exceptions
class Experiment(object):
def __init__(self, name='Experiment', raise_on_mismatch=False):
self.name = name
self.raise_on_mismatch = raise_on_mismatch
self._control = None
self.observations = []
def control(self):
self._control = Observation('Control')
return Test('Control', True, self._control)
def candidate(self, name='Candidate'):
observation = Observation(name)
self.observations.append(observation)
return Test(name, False, observation)
def run(self):
control = self._control
if control is None:
raise exceptions.LaboratoryException(
'Your experiment must record a control case'
)
match = self.compare(control, *self.observations)
self.publish(match)
return control.value
def compare(self, control, *candidates):
for observation in candidates:
if observation.failure or control.value != observation.value:
return self._comparison_mismatch(control, observation)
return True
def _comparison_mismatch(self, control, observation):
if self.raise_on_mismatch:
if observation.failure:
msg = '%s raised an exception:\n%s' % (
observation.name, traceback.format_exc(observation.exception)
)
else:
msg = '%s does not match control value (%s != %s)' % (
observation.name, control.value, observation.value
)
raise exceptions.MismatchException(msg)
return False
def publish(self, match):
return
|
Call Experiment.publish in run method
|
Call Experiment.publish in run method
|
Python
|
mit
|
joealcorn/laboratory,shaunvxc/laboratory
|
---
+++
@@ -30,6 +30,7 @@
)
match = self.compare(control, *self.observations)
+ self.publish(match)
return control.value
def compare(self, control, *candidates):
@@ -53,5 +54,5 @@
return False
- def publish(self):
- raise NotImplementedError
+ def publish(self, match):
+ return
|
9560ccf476a887c20b2373eca52f38f186b6ed58
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
class NostalgiaConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable", "qt/5.14.1@bincrafters/stable", "sqlite3/3.31.0", "libiconv/1.16"
generators = "cmake", "cmake_find_package", "cmake_paths"
#default_options = {
# "sdl2:nas": False
#}
def requirements(self):
pass
|
from conans import ConanFile, CMake
class NostalgiaConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable"
generators = "cmake", "cmake_find_package", "cmake_paths"
#default_options = {
# "sdl2:nas": False
#}
|
Remove conan Qt, as it is currently being ignored
|
[nostalgia] Remove conan Qt, as it is currently being ignored
|
Python
|
mpl-2.0
|
wombatant/nostalgia,wombatant/nostalgia,wombatant/nostalgia
|
---
+++
@@ -2,11 +2,8 @@
class NostalgiaConan(ConanFile):
settings = "os", "compiler", "build_type", "arch"
- requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable", "qt/5.14.1@bincrafters/stable", "sqlite3/3.31.0", "libiconv/1.16"
+ requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable"
generators = "cmake", "cmake_find_package", "cmake_paths"
#default_options = {
# "sdl2:nas": False
#}
-
- def requirements(self):
- pass
|
a284a69432b2e0052fd2da4121cf4512fc9423da
|
lemon/dashboard/admin.py
|
lemon/dashboard/admin.py
|
from django.conf import settings
from lemon import extradmin as admin
from lemon.dashboard import views
from lemon.dashboard.base import dashboard, Widget
class DashboardAdmin(admin.AppAdmin):
instance = dashboard
@property
def urls(self):
return self.instance.get_urls(self), 'dashboard', 'dashboard'
admin.site.register_app('dashboard', DashboardAdmin)
|
from django.conf import settings
from lemon import extradmin as admin
from lemon.dashboard import views
from lemon.dashboard.base import dashboard, Widget
class DashboardAdmin(admin.AppAdmin):
dashboard = dashboard
@property
def urls(self):
return self.dashboard.get_urls(self), 'dashboard', 'dashboard'
admin.site.register_app('dashboard', DashboardAdmin)
|
Rename instance to dashboard in DashboardAdmin
|
Rename instance to dashboard in DashboardAdmin
|
Python
|
bsd-3-clause
|
trilan/lemon,trilan/lemon,trilan/lemon
|
---
+++
@@ -7,11 +7,11 @@
class DashboardAdmin(admin.AppAdmin):
- instance = dashboard
+ dashboard = dashboard
@property
def urls(self):
- return self.instance.get_urls(self), 'dashboard', 'dashboard'
+ return self.dashboard.get_urls(self), 'dashboard', 'dashboard'
admin.site.register_app('dashboard', DashboardAdmin)
|
8541ec09e237f1401095d31177bdde9ac1adaa39
|
util/linkJS.py
|
util/linkJS.py
|
#!/usr/bin/env python
import os
def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]):
with open(target_fn, "wb") as target:
target.write(prologue)
# Add files listed in file_list_fn
with open(file_list_fn) as file_list:
for source_fn in file_list:
source_fn = source_fn.replace("/", os.path.sep).strip()
if len(source_fn) > 0 and source_fn[0] != "#":
addContents(source_base, source_fn, target)
# Add all *.js files in module_dirs
for module_base in module_dirs:
for module_fn in os.listdir(module_base):
if module_fn.endswith(".js"):
addContents(module_base, module_fn, target)
def addContents(source_base, source_fn, target):
target.write("\n\n// " + source_fn + "\n\n")
with open(os.path.join(source_base, source_fn)) as source:
for line in source:
target.write(line)
|
#!/usr/bin/env python
import os
def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]):
with open(target_fn, "wb") as target:
target.write(prologue)
# Add files listed in file_list_fn
with open(file_list_fn) as file_list:
for source_fn in file_list:
source_fn = source_fn.replace("/", os.path.sep).strip()
if len(source_fn) > 0 and source_fn[0] != "#":
addContents(os.path.join(source_base, source_fn), target)
# Add all *.js files in module_dirs
for module_base in module_dirs:
for module_fn in os.listdir(module_base):
if module_fn.endswith(".js"):
addContents(os.path.join(module_base, module_fn), target)
def addContents(source_fn, target):
target.write("\n\n// " + source_fn + "\n\n")
with open(source_fn) as source:
for line in source:
target.write(line)
|
Include full path to original files
|
Include full path to original files
|
Python
|
mpl-2.0
|
MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz
|
---
+++
@@ -12,17 +12,17 @@
for source_fn in file_list:
source_fn = source_fn.replace("/", os.path.sep).strip()
if len(source_fn) > 0 and source_fn[0] != "#":
- addContents(source_base, source_fn, target)
+ addContents(os.path.join(source_base, source_fn), target)
# Add all *.js files in module_dirs
for module_base in module_dirs:
for module_fn in os.listdir(module_base):
if module_fn.endswith(".js"):
- addContents(module_base, module_fn, target)
+ addContents(os.path.join(module_base, module_fn), target)
-def addContents(source_base, source_fn, target):
+def addContents(source_fn, target):
target.write("\n\n// " + source_fn + "\n\n")
- with open(os.path.join(source_base, source_fn)) as source:
+ with open(source_fn) as source:
for line in source:
target.write(line)
|
72dd10849190fb191fdab4962996ea537322e103
|
tests/TestPluginManager.py
|
tests/TestPluginManager.py
|
import socket
import time
from unittest import TestCase
from PluginManager import PluginManager
class TestPluginManager(TestCase):
def test_stop(self):
class Plugin:
def __init__(self):
sock = socket.socket()
sock.bind(('', 0)) # bind to any available port
self._port = sock.getsockname()[1]
sock.close()
def get_port(self):
return self._port
plugin_manager = PluginManager(Plugin(), lambda: None)
plugin_manager.start()
time.sleep(1)
plugin_manager.stop()
plugin_manager.join()
self.assertFalse(plugin_manager.is_alive())
|
import socket
import time
from unittest import TestCase
from PluginManager import PluginManager
class TestPluginManager(TestCase):
def test_stop(self):
class Plugin:
def __init__(self):
sock = socket.socket()
sock.bind(('', 0)) # bind to any available port
self._port = sock.getsockname()[1]
sock.close()
def get_port(self):
return self._port
plugin_manager = PluginManager(Plugin(), lambda: None)
plugin_manager.start()
time.sleep(0.01)
plugin_manager.stop()
plugin_manager.join()
self.assertFalse(plugin_manager.is_alive())
|
Reduce sleep duration in PluginManager.stop() test
|
Reduce sleep duration in PluginManager.stop() test
|
Python
|
mit
|
ckaz18/honeypot,laurenmalone/honeypot,theplue/honeypot,ckaz18/honeypot,laurenmalone/honeypot,theplue/honeypot,coyle5280/honeypot,coyle5280/honeypot,ckaz18/honeypot,theplue/honeypot,theplue/honeypot,coyle5280/honeypot,ckaz18/honeypot,laurenmalone/honeypot,laurenmalone/honeypot,coyle5280/honeypot
|
---
+++
@@ -15,7 +15,7 @@
return self._port
plugin_manager = PluginManager(Plugin(), lambda: None)
plugin_manager.start()
- time.sleep(1)
+ time.sleep(0.01)
plugin_manager.stop()
plugin_manager.join()
self.assertFalse(plugin_manager.is_alive())
|
2117778d777120293e506eca9743f97619b5ad5c
|
kiwi/interface.py
|
kiwi/interface.py
|
class Menu(object):
def __init__(self, dialog, items, title, caller = None):
self.d = dialog
self.caller = caller
self.entries = []
self.dispatch_table = {}
tag = 1
self.title = title
for entry, func in items:
self.entries.append(tuple([str(tag), entry]))
self.dispatch_table[str(tag)] = func
tag += 1
def run(self, ret=None):
code, tag = self.d.menu(self.title, choices=self.entries)
if code == self.d.OK: self.dispatch(tag)
if ret: ret()
def dispatch(self, tag):
if tag in self.dispatch_table:
func = self.dispatch_table[tag]
if isinstance(func, Menu):
func.run(ret=self.run)
else: func()
|
class MenuItem(object):
def __init__(self, func=None):
if func: self.function = func
# Wrapper for child.function() that creates a call stack
def run(self, ret=None):
self.function()
if ret: ret()
class Menu(MenuItem):
def __init__(self, dialog, items, title):
self.d = dialog
self.entries = []
self.dispatch_table = {}
tag = 1
self.title = title
for entry, func in items:
self.entries.append(tuple([str(tag), entry]))
self.dispatch_table[str(tag)] = func
tag += 1
def function(self):
code, tag = self.d.menu(self.title, choices=self.entries)
if code == self.d.OK: self._dispatch(tag)
def _dispatch(self, tag):
if tag in self.dispatch_table:
func = self.dispatch_table[tag]
if isinstance(func, MenuItem):
func.run(ret=self.run)
else: func()
|
Create object MenuItem that wraps functions to create a call stack
|
Create object MenuItem that wraps functions to create a call stack
|
Python
|
mit
|
jakogut/KiWI
|
---
+++
@@ -1,7 +1,15 @@
-class Menu(object):
- def __init__(self, dialog, items, title, caller = None):
+class MenuItem(object):
+ def __init__(self, func=None):
+ if func: self.function = func
+
+ # Wrapper for child.function() that creates a call stack
+ def run(self, ret=None):
+ self.function()
+ if ret: ret()
+
+class Menu(MenuItem):
+ def __init__(self, dialog, items, title):
self.d = dialog
- self.caller = caller
self.entries = []
self.dispatch_table = {}
@@ -14,15 +22,14 @@
self.dispatch_table[str(tag)] = func
tag += 1
- def run(self, ret=None):
+ def function(self):
code, tag = self.d.menu(self.title, choices=self.entries)
- if code == self.d.OK: self.dispatch(tag)
- if ret: ret()
+ if code == self.d.OK: self._dispatch(tag)
- def dispatch(self, tag):
+ def _dispatch(self, tag):
if tag in self.dispatch_table:
func = self.dispatch_table[tag]
- if isinstance(func, Menu):
+ if isinstance(func, MenuItem):
func.run(ret=self.run)
else: func()
|
ccf285c30a0110f2ff59b91ec0166f9b5306239d
|
dukpy/evaljs.py
|
dukpy/evaljs.py
|
import json
from . import _dukpy
try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
try: # pragma: no cover
unicode
string_types = (str, unicode)
except NameError: # pragma: no cover
string_types = (bytes, str)
class JSInterpreter(object):
"""JavaScript Interpreter"""
def __init__(self):
self._ctx = _dukpy.create_context()
def evaljs(self, code, **kwargs):
"""Runs JavaScript code in the context of the interpreter.
All arguments will be converted to plain javascript objects
through the JSON encoder and will be available in `dukpy`
global object.
Returns the last object on javascript stack.
"""
jsvars = json.dumps(kwargs)
jscode = code
if not isinstance(code, string_types):
jscode = ';\n'.join(code)
if not isinstance(jscode, bytes):
jscode = jscode.encode('utf-8')
res = _dukpy.eval_string(self._ctx, jscode, jsvars)
if res is None:
return None
return json.loads(res.decode('utf-8'))
def evaljs(code, **kwargs):
"""Evaluates the given ``code`` as JavaScript and returns the result"""
return JSInterpreter().evaljs(code, **kwargs)
|
import json
from . import _dukpy
try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
try: # pragma: no cover
unicode
string_types = (str, unicode)
jscode_type = str
except NameError: # pragma: no cover
string_types = (bytes, str)
jscode_type = str
class JSInterpreter(object):
"""JavaScript Interpreter"""
def __init__(self):
self._ctx = _dukpy.create_context()
def evaljs(self, code, **kwargs):
"""Runs JavaScript code in the context of the interpreter.
All arguments will be converted to plain javascript objects
through the JSON encoder and will be available in `dukpy`
global object.
Returns the last object on javascript stack.
"""
jsvars = json.dumps(kwargs)
jscode = code
if not isinstance(code, string_types):
jscode = ';\n'.join(code)
if not isinstance(jscode, str):
# Source code must be str on both Py2 and Py3
# so it must be encoded on Py2 and decoded on Py3
if isinstance(jscode, bytes):
jscode = jscode.decode('utf-8')
else:
jscode = jscode.encode('utf-8')
res = _dukpy.eval_string(self._ctx, jscode, jsvars)
if res is None:
return None
return json.loads(res.decode('utf-8'))
def evaljs(code, **kwargs):
"""Evaluates the given ``code`` as JavaScript and returns the result"""
return JSInterpreter().evaljs(code, **kwargs)
|
Fix unicode source code on py3
|
Fix unicode source code on py3
|
Python
|
mit
|
amol-/dukpy,amol-/dukpy,amol-/dukpy
|
---
+++
@@ -9,8 +9,10 @@
try: # pragma: no cover
unicode
string_types = (str, unicode)
+ jscode_type = str
except NameError: # pragma: no cover
string_types = (bytes, str)
+ jscode_type = str
class JSInterpreter(object):
@@ -33,8 +35,13 @@
if not isinstance(code, string_types):
jscode = ';\n'.join(code)
- if not isinstance(jscode, bytes):
- jscode = jscode.encode('utf-8')
+ if not isinstance(jscode, str):
+ # Source code must be str on both Py2 and Py3
+ # so it must be encoded on Py2 and decoded on Py3
+ if isinstance(jscode, bytes):
+ jscode = jscode.decode('utf-8')
+ else:
+ jscode = jscode.encode('utf-8')
res = _dukpy.eval_string(self._ctx, jscode, jsvars)
if res is None:
|
fdcdb5416bccf85a1745ccd07915e15629128ff9
|
es_config.py
|
es_config.py
|
# A list of ES hosts
ES_HOSTS = ['https://c3d581bfab179c1101d5b7a9e22a5f95.us-east-1.aws.found.io:9243']
ES_HTTP_AUTH = ("elastic:u3Mk8jjADYJ4NzUmPTn15MNx")
|
import os
import ast
# A list of ES hosts
# Uncomment the following for debugging
# ES_HOSTS = ['https://c3d581bfab179c1101d5b7a9e22a5f95.us-east-1.aws.found.io:9243']
# ES_HTTP_AUTH = ("elastic:u3Mk8jjADYJ4NzUmPTn15MNx")
# Comment the following for debugging,
# or set corresponding environment variables
try:
ES_HOSTS = ast.literal_eval(os.environ['ES_HOSTS'])
ES_HTTP_AUTH = ast.literal_eval(os.environ['ES_HTTP_AUTH'])
except Exception as err:
print(err)
print("Please set ES_HOSTS and ES_HTTP_AUTH correctly.")
|
Update ES server config to make use of environment variables
|
Update ES server config to make use of environment variables
|
Python
|
mit
|
justinchuby/cmu-courseapi-flask
|
---
+++
@@ -1,4 +1,17 @@
+import os
+import ast
+
# A list of ES hosts
-ES_HOSTS = ['https://c3d581bfab179c1101d5b7a9e22a5f95.us-east-1.aws.found.io:9243']
-ES_HTTP_AUTH = ("elastic:u3Mk8jjADYJ4NzUmPTn15MNx")
+# Uncomment the following for debugging
+# ES_HOSTS = ['https://c3d581bfab179c1101d5b7a9e22a5f95.us-east-1.aws.found.io:9243']
+# ES_HTTP_AUTH = ("elastic:u3Mk8jjADYJ4NzUmPTn15MNx")
+
+# Comment the following for debugging,
+# or set corresponding environment variables
+try:
+ ES_HOSTS = ast.literal_eval(os.environ['ES_HOSTS'])
+ ES_HTTP_AUTH = ast.literal_eval(os.environ['ES_HTTP_AUTH'])
+except Exception as err:
+ print(err)
+ print("Please set ES_HOSTS and ES_HTTP_AUTH correctly.")
|
08fe9e7beb4285feec9205012a62d464b3489bcf
|
natasha/grammars/person/interpretation.py
|
natasha/grammars/person/interpretation.py
|
from enum import Enum
from collections import Counter
from yargy.interpretation import InterpretationObject
class PersonObject(InterpretationObject):
class Attributes(Enum):
Firstname = 0 # владимир
Middlename = 1 # владимирович
Lastname = 2 # путин
Descriptor = 3 # президент
DescriptorDestination = 4 # российской федерации
@property
def gender(self):
'''
Very simple gender prediction algorithm
'''
counter = Counter()
for field, token in self.__dict__.items():
if not token:
continue
for form in token.forms:
grammemes = set()
if ('Ms-f' in form['grammemes']) or ('Fixd' in form['grammemes']):
continue
elif 'femn' in form['grammemes']:
grammemes |= {'femn'}
elif 'masc' in form['grammemes']:
grammemes |= {'masc'}
counter.update(grammemes)
return counter
|
# coding: utf-8
from __future__ import unicode_literals
from enum import Enum
from collections import Counter
from yargy.interpretation import InterpretationObject
class PersonObject(InterpretationObject):
class Attributes(Enum):
Firstname = 0 # владимир
Middlename = 1 # владимирович
Lastname = 2 # путин
Descriptor = 3 # президент
DescriptorDestination = 4 # российской федерации
@property
def gender(self):
'''
Very simple gender prediction algorithm
'''
counter = Counter()
for field, token in self.__dict__.items():
if not token:
continue
for form in token.forms:
grammemes = set()
if ('Ms-f' in form['grammemes']) or ('Fixd' in form['grammemes']):
continue
elif 'femn' in form['grammemes']:
grammemes |= {'femn'}
elif 'masc' in form['grammemes']:
grammemes |= {'masc'}
counter.update(grammemes)
return counter
|
Fix encoding for python 2.x
|
Fix encoding for python 2.x
|
Python
|
mit
|
natasha/natasha
|
---
+++
@@ -1,3 +1,6 @@
+# coding: utf-8
+from __future__ import unicode_literals
+
from enum import Enum
from collections import Counter
from yargy.interpretation import InterpretationObject
|
294c251d83bec3738ce54a67d718c2ba959a7b4b
|
git.py
|
git.py
|
import os
import subprocess
class cd:
"""Context manager for changing the current working directory."""
def __init__(self, new_path):
self.new_path = os.path.expanduser(new_path)
def __enter__(self):
self.previous_path = os.getcwd()
os.chdir(self.new_path)
def __exit__(self, etype, value, traceback):
os.chdir(self.previous_path)
# TODO use same context for all methods
class git:
"""Minimal git wrapper, providing only funtions to init add and commit."""
def __init__(self, path):
assert os.path.isabs(path)
self.path = path
def init(self):
with cd(self.path):
subprocess.call('git init')
def add(self, filenames):
for filename in filenames:
assert os.path.isfile(filename) # needs entire path
subprocess.call('git add ' + filename)
def commit(self, date, message=''):
subprocess.call(
"git commit -m '{m}' --date {d}".format(m=message, d=date)
)
|
import os
import subprocess
from contextlib import ContextDecorator
class cd(ContextDecorator):
"""Context manager/decorator for changing the current working directory."""
def __init__(self, new_path):
self.new_path = os.path.expanduser(new_path)
def __enter__(self):
self.previous_path = os.getcwd()
os.chdir(self.new_path)
return self
def __exit__(self, *exc):
os.chdir(self.previous_path)
return False
# TODO use same context for all methods
class git:
"""Minimal git wrapper, providing only funtions to init add and commit."""
path = ''
def __init__(self, path):
assert os.path.isabs(path)
self.path = path
@cd(path)
def init(self):
print('current dir: {}'.format(os.getcwd()))
subprocess.call('git init')
def add(self, filenames):
for filename in filenames:
assert os.path.isfile(filename) # needs entire path
subprocess.call('git add ' + filename)
def commit(self, date, message=''):
subprocess.call(
"git commit -m '{m}' --date {d}".format(m=message, d=date)
)
|
Extend cd context manager to decorator.
|
Extend cd context manager to decorator.
|
Python
|
mit
|
0xfoo/punchcard
|
---
+++
@@ -1,31 +1,36 @@
import os
import subprocess
+from contextlib import ContextDecorator
-class cd:
- """Context manager for changing the current working directory."""
+class cd(ContextDecorator):
+ """Context manager/decorator for changing the current working directory."""
def __init__(self, new_path):
self.new_path = os.path.expanduser(new_path)
def __enter__(self):
self.previous_path = os.getcwd()
os.chdir(self.new_path)
+ return self
- def __exit__(self, etype, value, traceback):
+ def __exit__(self, *exc):
os.chdir(self.previous_path)
+ return False
# TODO use same context for all methods
class git:
"""Minimal git wrapper, providing only funtions to init add and commit."""
+ path = ''
def __init__(self, path):
assert os.path.isabs(path)
self.path = path
+ @cd(path)
def init(self):
- with cd(self.path):
- subprocess.call('git init')
+ print('current dir: {}'.format(os.getcwd()))
+ subprocess.call('git init')
def add(self, filenames):
|
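The gain from basing cd on contextlib.ContextDecorator in the record above is that a single class then supports both call styles. A small self-contained sketch of that dual use, with placeholder paths:

import os
from contextlib import ContextDecorator

class cd(ContextDecorator):
    """Change the working directory on entry, restore it on exit."""
    def __init__(self, new_path):
        self.new_path = os.path.expanduser(new_path)
    def __enter__(self):
        self.previous_path = os.getcwd()
        os.chdir(self.new_path)
        return self
    def __exit__(self, *exc):
        os.chdir(self.previous_path)
        return False

with cd('/tmp'):          # context-manager form
    print(os.getcwd())

@cd('/tmp')               # decorator form: the same enter/exit wraps each call
def list_tmp():
    return os.listdir('.')

print(list_tmp())
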
2e6efd4ecf22a1e7c673f90f113bfae47b98d294
|
medical_insurance_us/models/__init__.py
|
medical_insurance_us/models/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Dave Lasley <dave@laslabs.com>
# Copyright: 2015 LasLabs, Inc [https://laslabs.com]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import medical_insurance_plan
from . import medical_insurance_company
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Dave Lasley <dave@laslabs.com>
# Copyright: 2015 LasLabs, Inc [https://laslabs.com]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import medical_insurance_plan
from . import medical_insurance_template
|
Add template and remove company from insurance_us imports
|
Add template and remove company from insurance_us imports
|
Python
|
agpl-3.0
|
ShaheenHossain/eagle-medical,laslabs/vertical-medical,ShaheenHossain/eagle-medical,laslabs/vertical-medical
|
---
+++
@@ -20,4 +20,4 @@
##############################################################################
from . import medical_insurance_plan
-from . import medical_insurance_company
+from . import medical_insurance_template
|
3d3a81efc36e39888929e62287b9d895922d8615
|
tests/sentry/filters/test_web_crawlers.py
|
tests/sentry/filters/test_web_crawlers.py
|
from __future__ import absolute_import
from sentry.filters.web_crawlers import WebCrawlersFilter
from sentry.testutils import TestCase
class WebCrawlersFilterTest(TestCase):
filter_cls = WebCrawlersFilter
def apply_filter(self, data):
return self.filter_cls(self.project).test(data)
def get_mock_data(self, user_agent):
return {
'sentry.interfaces.Http': {
'url': 'http://example.com',
'method': 'GET',
'headers': [
['User-Agent', user_agent],
]
}
}
def test_filters_googlebot(self):
data = self.get_mock_data('Googlebot')
assert self.apply_filter(data)
def test_does_not_filter_chrome(self):
data = self.get_mock_data('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')
assert not self.apply_filter(data)
|
from __future__ import absolute_import
from sentry.filters.web_crawlers import WebCrawlersFilter
from sentry.testutils import TestCase
class WebCrawlersFilterTest(TestCase):
filter_cls = WebCrawlersFilter
def apply_filter(self, data):
return self.filter_cls(self.project).test(data)
def get_mock_data(self, user_agent):
return {
'sentry.interfaces.Http': {
'url': 'http://example.com',
'method': 'GET',
'headers': [
['User-Agent', user_agent],
]
}
}
def test_filters_googlebot(self):
data = self.get_mock_data('Googlebot')
assert self.apply_filter(data)
def test_does_not_filter_chrome(self):
data = self.get_mock_data('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')
assert not self.apply_filter(data)
def test_filters_twitterbot(self):
data = self.get_mock_data('Twitterbot/1.0')
assert self.apply_filter(data)
def test_filters_slack(self):
data = self.get_mock_data('Slackbot-LinkExpanding 1.0 (+https://api.slack.com/robots)')
assert self.apply_filter(data)
data = self.get_mock_data('Slack-ImgProxy 0.19 (+https://api.slack.com/robots)')
assert self.apply_filter(data)
data = self.get_mock_data('Slackbot 1.0(+https://api.slack.com/robots)')
assert self.apply_filter(data)
|
Add unit tests for filtering Twitterbot and Slack.
|
Add unit tests for filtering Twitterbot and Slack.
|
Python
|
bsd-3-clause
|
ifduyue/sentry,mvaled/sentry,gencer/sentry,gencer/sentry,mvaled/sentry,JackDanger/sentry,beeftornado/sentry,mvaled/sentry,mvaled/sentry,jean/sentry,looker/sentry,looker/sentry,jean/sentry,looker/sentry,mvaled/sentry,looker/sentry,ifduyue/sentry,ifduyue/sentry,mvaled/sentry,JackDanger/sentry,gencer/sentry,gencer/sentry,jean/sentry,jean/sentry,JackDanger/sentry,ifduyue/sentry,beeftornado/sentry,gencer/sentry,beeftornado/sentry,looker/sentry,ifduyue/sentry,jean/sentry
|
---
+++
@@ -28,3 +28,17 @@
def test_does_not_filter_chrome(self):
data = self.get_mock_data('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')
assert not self.apply_filter(data)
+
+ def test_filters_twitterbot(self):
+ data = self.get_mock_data('Twitterbot/1.0')
+ assert self.apply_filter(data)
+
+ def test_filters_slack(self):
+ data = self.get_mock_data('Slackbot-LinkExpanding 1.0 (+https://api.slack.com/robots)')
+ assert self.apply_filter(data)
+
+ data = self.get_mock_data('Slack-ImgProxy 0.19 (+https://api.slack.com/robots)')
+ assert self.apply_filter(data)
+
+ data = self.get_mock_data('Slackbot 1.0(+https://api.slack.com/robots)')
+ assert self.apply_filter(data)
|
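Independent of sentry's actual WebCrawlersFilter internals, the behaviour exercised by the tests above boils down to a case-insensitive match on the User-Agent header. A standalone sketch covering just the agents used in these tests (the pattern is made up for illustration):

import re

CRAWLER_UA = re.compile(r'googlebot|twitterbot|slackbot|slack-imgproxy', re.IGNORECASE)

def looks_like_crawler(user_agent):
    return bool(CRAWLER_UA.search(user_agent or ''))

assert looks_like_crawler('Googlebot')
assert looks_like_crawler('Twitterbot/1.0')
assert looks_like_crawler('Slackbot-LinkExpanding 1.0 (+https://api.slack.com/robots)')
assert not looks_like_crawler('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')
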
6bd088acd0ec0cfa5298051e286ce76e42430067
|
shuup/front/themes/views/_product_preview.py
|
shuup/front/themes/views/_product_preview.py
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.front.views.product import ProductDetailView
class ProductPreviewView(ProductDetailView):
template_name = "shuup/front/product/product_preview.jinja"
def get_context_data(self, **kwargs):
# By default the template rendering the basket add form
# uses the `request.path` as its' `next` value.
# This is fine if you are on product page but here in
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
# Add `return_url` to context to avoid usage of `request.path` in
# `classic_gray/shuup/front/product/_detail_order_section.jinja`
context["return_url"] = "/xtheme/products"
return context
def product_preview(request):
return ProductPreviewView.as_view()(request, pk=request.GET["id"])
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.front.views.product import ProductDetailView
class ProductPreviewView(ProductDetailView):
template_name = "shuup/front/product/product_preview.jinja"
def get_context_data(self, **kwargs):
# By default the template rendering the basket add form
# uses the `request.path` as its' `next` value.
# This is fine if you are on product page but here in
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
# Add `return_url` to context to avoid usage of `request.path`
context["return_url"] = "/xtheme/products"
return context
def product_preview(request):
return ProductPreviewView.as_view()(request, pk=request.GET["id"])
|
Remove reference to nonexistent file
|
Front: Remove reference to nonexistent file
|
Python
|
agpl-3.0
|
shoopio/shoop,shoopio/shoop,shawnadelic/shuup,suutari-ai/shoop,shawnadelic/shuup,hrayr-artunyan/shuup,suutari/shoop,suutari/shoop,shoopio/shoop,suutari-ai/shoop,hrayr-artunyan/shuup,shawnadelic/shuup,suutari/shoop,suutari-ai/shoop,hrayr-artunyan/shuup
|
---
+++
@@ -18,8 +18,7 @@
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
- # Add `return_url` to context to avoid usage of `request.path` in
- # `classic_gray/shuup/front/product/_detail_order_section.jinja`
+ # Add `return_url` to context to avoid usage of `request.path`
context["return_url"] = "/xtheme/products"
return context
|
5fdedac2eae25d88d2595f0fe79ca3a332f24dfe
|
pysuru/api.py
|
pysuru/api.py
|
# coding: utf-8
"""
Public endpoint to import API classes
Instead of importing each module individually (eg.
``from pysuru.apps import AppsAPI``), import from this module.
"""
from __future__ import absolute_imports
from .apps import AppsAPI
from .services import ServicesAPI
|
# coding: utf-8
"""
Public endpoint to import API classes
Instead of importing each module individually (eg.
``from pysuru.apps import AppsAPI``), import from this module.
"""
from __future__ import absolute_imports
from .apps import AppsAPI
from .services import ServiceInstanceAPI
|
Fix service instance API class name
|
Fix service instance API class name
|
Python
|
mit
|
rcmachado/pysuru
|
---
+++
@@ -8,4 +8,4 @@
from __future__ import absolute_imports
from .apps import AppsAPI
-from .services import ServicesAPI
+from .services import ServiceInstanceAPI
|
a347c699be3ce5659db4b76a26ce253a209e232e
|
webapp_health_monitor/verificators/base.py
|
webapp_health_monitor/verificators/base.py
|
from webapp_health_monitor import errors
class Verificator(object):
verificator_name = None
def __init__(self, **kwargs):
pass
def run(self):
raise NotImplementedError()
def __str__(self):
if self.verificator_name:
return self.verificator_name
else:
return self.__class__.__name__
class RangeVerificator(Verificator):
value_extractor = None
upper_bound = None
lower_bound = None
def run(self):
self._check_configuration()
self._check_value()
def _check_configuration(self):
if not self._are_bounds_configured():
raise errors.BadConfigurationError()
def _are_bounds_configured(self):
if self.lower_bound is None:
return self.upper_bound is not None
elif self.upper_bound is not None:
return self.lower_bound <= self.upper_bound
else:
return True
def _check_value(self):
value = self.get_value()
self._check_lower_bound(value)
self._check_upper_bound(value)
def get_value(self):
raise NotImplementedError
def _check_lower_bound(self, value):
if self.lower_bound is not None:
if value < self.lower_bound:
raise errors.VerificationFailure()
def _check_upper_bound(self, value):
if self.upper_bound is not None:
if value > self.upper_bound:
raise errors.VerificationFailure()
|
from webapp_health_monitor import errors
class Verificator(object):
verificator_name = None
def __init__(self, **kwargs):
pass
def run(self):
raise NotImplementedError()
def __str__(self):
if self.verificator_name:
return self.verificator_name
else:
return self.__class__.__name__
class RangeVerificator(Verificator):
upper_bound = None
lower_bound = None
def run(self):
self._check_configuration()
self._check_value()
def _check_configuration(self):
if not self._are_bounds_configured():
raise errors.BadConfigurationError()
def _are_bounds_configured(self):
if self.lower_bound is None:
return self.upper_bound is not None
elif self.upper_bound is not None:
return self.lower_bound <= self.upper_bound
else:
return True
def _check_value(self):
value = self.get_value()
self._check_lower_bound(value)
self._check_upper_bound(value)
def get_value(self):
raise NotImplementedError
def _check_lower_bound(self, value):
if self.lower_bound is not None:
if value < self.lower_bound:
raise errors.VerificationFailure()
def _check_upper_bound(self, value):
if self.upper_bound is not None:
if value > self.upper_bound:
raise errors.VerificationFailure()
|
Delete unused value extractor attribute.
|
Delete unused value extractor attribute.
|
Python
|
mit
|
pozytywnie/webapp-health-monitor,serathius/webapp-health-monitor
|
---
+++
@@ -18,7 +18,6 @@
class RangeVerificator(Verificator):
- value_extractor = None
upper_bound = None
lower_bound = None
|
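RangeVerificator in the record above is abstract: a subclass supplies get_value() plus at least one bound, and run() then raises BadConfigurationError for inconsistent bounds or VerificationFailure for an out-of-range value. A sketch of a concrete subclass — the class name and the hard-coded measurement are invented; the import path follows the file path shown in the record:

from webapp_health_monitor.verificators.base import RangeVerificator

class FreeDiskSpaceVerificator(RangeVerificator):
    lower_bound = 10    # require at least 10 (say, GB) free
    upper_bound = None  # no upper limit

    def get_value(self):
        return 42  # stand-in for a real measurement

FreeDiskSpaceVerificator().run()  # passes: bounds are consistent and 42 >= 10
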
47672fe44673fe9cae54a736bdc9eb496494ab58
|
UI/utilities/synchronization_core.py
|
UI/utilities/synchronization_core.py
|
# -*- coding: utf-8 -*-
# Synchronization core module for Storj GUI Client #
class StorjFileSynchronization():
def start_sync_thread(self):
return 1
def reload_sync_configuration(self):
return 1
def add_file_to_sync_queue(self):
return 1
|
# -*- coding: utf-8 -*-
# Synchronization core module for Storj GUI Client #
import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import threading
HANDLE_ON_MOVE_EVENT = True
HANDLE_ON_DELETE_EVENT = True
class StorjFileSynchronization():
def start_sync_thread(self):
return 1
def reload_sync_configuration(self):
return 1
def add_file_to_sync_queue(self, file_path):
return 1
class FileChangesHandler(PatternMatchingEventHandler):
#patterns = ["*.xml", "*.lxml"]
def __init__(self):
self.storj_file_synchronization_core = StorjFileSynchronization()
def process(self, event):
"""
event.event_type
'modified' | 'created' | 'moved' | 'deleted'
event.is_directory
True | False
event.src_path
path/to/observed/file
"""
# the file will be processed there
self.storj_file_synchronization_core.add_file_to_sync_queue(file_path=str(event.src_path))
print str(event)
#print str(event.src_path) + str(event.event_type) + "event" # print now only for degug
def on_deleted(self, event):
if HANDLE_ON_DELETE_EVENT:
self.process(event)
def on_moved(self, event):
if HANDLE_ON_MOVE_EVENT:
self.process(event)
def on_modified(self, event):
self.process(event)
def on_created(self, event):
self.process(event)
class SyncObserverWorker():
def start_observing_thread(self):
observing_main_thread = threading.Thread(
target=self.start_observing)
observing_main_thread.start()
def start_observing(self):
paths_to_observe = []
paths_to_observe.append("/home/lakewik/storjsync")
self.observer = Observer()
for path in paths_to_observe:
self.observer.schedule(FileChangesHandler(), path=str(path))
self.observer.start()
print "Synchronization directories observing started!"
def stop_observers(self):
self.observer.stop()
return 1
#try:
# while True:
# time.sleep(1)
#except KeyboardInterrupt:
# observer.stop()
#observer.join()
|
Add synchronization directory observer and handler
|
Add synchronization directory observer and handler
|
Python
|
mit
|
lakewik/storj-gui-client
|
---
+++
@@ -1,13 +1,83 @@
# -*- coding: utf-8 -*-
# Synchronization core module for Storj GUI Client #
+import time
+from watchdog.observers import Observer
+from watchdog.events import PatternMatchingEventHandler
+import threading
+
+HANDLE_ON_MOVE_EVENT = True
+HANDLE_ON_DELETE_EVENT = True
class StorjFileSynchronization():
+
def start_sync_thread(self):
return 1
def reload_sync_configuration(self):
return 1
- def add_file_to_sync_queue(self):
+ def add_file_to_sync_queue(self, file_path):
return 1
+
+class FileChangesHandler(PatternMatchingEventHandler):
+ #patterns = ["*.xml", "*.lxml"]
+
+ def __init__(self):
+ self.storj_file_synchronization_core = StorjFileSynchronization()
+
+
+ def process(self, event):
+ """
+ event.event_type
+ 'modified' | 'created' | 'moved' | 'deleted'
+ event.is_directory
+ True | False
+ event.src_path
+ path/to/observed/file
+ """
+ # the file will be processed there
+ self.storj_file_synchronization_core.add_file_to_sync_queue(file_path=str(event.src_path))
+ print str(event)
+ #print str(event.src_path) + str(event.event_type) + "event" # print now only for degug
+
+ def on_deleted(self, event):
+ if HANDLE_ON_DELETE_EVENT:
+ self.process(event)
+
+ def on_moved(self, event):
+ if HANDLE_ON_MOVE_EVENT:
+ self.process(event)
+
+ def on_modified(self, event):
+ self.process(event)
+
+ def on_created(self, event):
+ self.process(event)
+
+class SyncObserverWorker():
+ def start_observing_thread(self):
+ observing_main_thread = threading.Thread(
+ target=self.start_observing)
+ observing_main_thread.start()
+
+ def start_observing(self):
+ paths_to_observe = []
+ paths_to_observe.append("/home/lakewik/storjsync")
+ self.observer = Observer()
+ for path in paths_to_observe:
+ self.observer.schedule(FileChangesHandler(), path=str(path))
+ self.observer.start()
+ print "Synchronization directories observing started!"
+
+ def stop_observers(self):
+ self.observer.stop()
+ return 1
+ #try:
+ # while True:
+ # time.sleep(1)
+ #except KeyboardInterrupt:
+ # observer.stop()
+
+ #observer.join()
+
|
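For reference alongside the record above, a minimal self-contained watchdog loop with a placeholder directory and an explicit stop/join; the printing handler here is illustrative, not Storj's handler:

import time
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler

class PrintingHandler(PatternMatchingEventHandler):
    def on_any_event(self, event):
        print(event.event_type, event.src_path)

observer = Observer()
observer.schedule(PrintingHandler(), path='/tmp')  # placeholder directory
observer.start()
try:
    time.sleep(5)  # observe for a few seconds
finally:
    observer.stop()
    observer.join()
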
23af33b7ca48c59ff58638b733437d8f348b279b
|
openapi_core/__init__.py
|
openapi_core/__init__.py
|
# -*- coding: utf-8 -*-
"""OpenAPI core module"""
from openapi_core.shortcuts import (
create_spec, validate_parameters, validate_body, validate_data,
)
__author__ = 'Artur Maciąg'
__email__ = 'maciag.artur@gmail.com'
__version__ = '0.5.0'
__url__ = 'https://github.com/p1c2u/openapi-core'
__license__ = 'BSD 3-Clause License'
__all__ = [
'create_spec', 'validate_parameters', 'validate_body', 'validate_data',
]
|
# -*- coding: utf-8 -*-
"""OpenAPI core module"""
from openapi_core.shortcuts import (
create_spec, validate_parameters, validate_body, validate_data,
)
__author__ = 'Artur Maciag'
__email__ = 'maciag.artur@gmail.com'
__version__ = '0.5.0'
__url__ = 'https://github.com/p1c2u/openapi-core'
__license__ = 'BSD 3-Clause License'
__all__ = [
'create_spec', 'validate_parameters', 'validate_body', 'validate_data',
]
|
Replace unicode character for RPM build.
|
Replace unicode character for RPM build.
To make building RPMs of package easier when using ascii by
default.
|
Python
|
bsd-3-clause
|
p1c2u/openapi-core
|
---
+++
@@ -4,7 +4,7 @@
create_spec, validate_parameters, validate_body, validate_data,
)
-__author__ = 'Artur Maciąg'
+__author__ = 'Artur Maciag'
__email__ = 'maciag.artur@gmail.com'
__version__ = '0.5.0'
__url__ = 'https://github.com/p1c2u/openapi-core'
|
f4c9482e41ec2ee6c894a413e8fcb0349a9edbd1
|
tapiriik/web/templatetags/displayutils.py
|
tapiriik/web/templatetags/displayutils.py
|
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
return str(round(float(value) * 100)) + "%"
|
from django import template
import json
register = template.Library()
@register.filter(name="format_meters")
def meters_to_kms(value):
try:
return round(value / 1000)
except:
return "NaN"
@register.filter(name='json')
def jsonit(obj):
return json.dumps(obj)
@register.filter(name='dict_get')
def dict_get(tdict, key):
if type(tdict) is not dict:
tdict = tdict.__dict__
return tdict.get(key, None)
@register.filter(name='format')
def format(format, var):
return format.format(var)
@register.simple_tag
def stringformat(value, *args):
return value.format(*args)
@register.filter(name="percentage")
def percentage(value, *args):
if not value:
return "NaN"
try:
return str(round(float(value) * 100)) + "%"
except ValueError:
return value
|
Fix broken diagnostic dashboard with new sync progress values
|
Fix broken diagnostic dashboard with new sync progress values
|
Python
|
apache-2.0
|
campbellr/tapiriik,niosus/tapiriik,gavioto/tapiriik,cheatos101/tapiriik,cheatos101/tapiriik,brunoflores/tapiriik,abhijit86k/tapiriik,mjnbike/tapiriik,dlenski/tapiriik,abhijit86k/tapiriik,cpfair/tapiriik,marxin/tapiriik,abhijit86k/tapiriik,dlenski/tapiriik,cheatos101/tapiriik,abs0/tapiriik,niosus/tapiriik,dmschreiber/tapiriik,gavioto/tapiriik,cmgrote/tapiriik,campbellr/tapiriik,abs0/tapiriik,cheatos101/tapiriik,brunoflores/tapiriik,mjnbike/tapiriik,cmgrote/tapiriik,cpfair/tapiriik,marxin/tapiriik,dmschreiber/tapiriik,mduggan/tapiriik,cmgrote/tapiriik,mduggan/tapiriik,dmschreiber/tapiriik,cgourlay/tapiriik,brunoflores/tapiriik,dlenski/tapiriik,mjnbike/tapiriik,cpfair/tapiriik,abs0/tapiriik,gavioto/tapiriik,campbellr/tapiriik,marxin/tapiriik,campbellr/tapiriik,dlenski/tapiriik,mduggan/tapiriik,gavioto/tapiriik,brunoflores/tapiriik,mduggan/tapiriik,olamy/tapiriik,niosus/tapiriik,marxin/tapiriik,cgourlay/tapiriik,dmschreiber/tapiriik,abhijit86k/tapiriik,mjnbike/tapiriik,cmgrote/tapiriik,cgourlay/tapiriik,cpfair/tapiriik,niosus/tapiriik,olamy/tapiriik,cgourlay/tapiriik,olamy/tapiriik,olamy/tapiriik,abs0/tapiriik
|
---
+++
@@ -33,4 +33,7 @@
def percentage(value, *args):
if not value:
return "NaN"
- return str(round(float(value) * 100)) + "%"
+ try:
+ return str(round(float(value) * 100)) + "%"
+ except ValueError:
+ return value
|
03eb0081a4037e36775271fb2373277f8e89835b
|
src/mcedit2/resourceloader.py
|
src/mcedit2/resourceloader.py
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import zipfile
log = logging.getLogger(__name__)
class ResourceNotFound(KeyError):
pass
class ResourceLoader(object):
def __init__(self):
super(ResourceLoader, self).__init__()
self.zipFiles = []
def addZipFile(self, zipPath):
try:
zf = zipfile.ZipFile(zipPath)
except zipfile.BadZipfile as e:
raise IOError("Could not read %s as a zip file." % zipPath)
self.zipFiles.append(zf)
def openStream(self, path):
for zipFile in self.zipFiles:
try:
stream = zipFile.open(path)
break
except KeyError: # Not found in zip file
continue
else:
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
|
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import zipfile
log = logging.getLogger(__name__)
class ResourceNotFound(KeyError):
pass
class ResourceLoader(object):
def __init__(self):
super(ResourceLoader, self).__init__()
self.zipFiles = []
def addZipFile(self, zipPath):
try:
zf = zipfile.ZipFile(zipPath)
except zipfile.BadZipfile as e:
raise IOError("Could not read %s as a zip file." % zipPath)
self.zipFiles.append(zf)
def openStream(self, path):
for zipFile in self.zipFiles:
try:
stream = zipFile.open(path)
break
except KeyError: # Not found in zip file
continue
else:
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
def blockModelPaths(self):
for zf in self.zipFiles:
for name in zf.namelist():
if name.startswith("assets/minecraft/models/block"):
yield name
|
Add function to ResourceLoader for listing all block models
|
Add function to ResourceLoader for listing all block models
xxx only lists Vanilla models. haven't looked at mods with models yet.
|
Python
|
bsd-3-clause
|
vorburger/mcedit2,vorburger/mcedit2,Rubisk/mcedit2,Rubisk/mcedit2
|
---
+++
@@ -33,3 +33,9 @@
raise ResourceNotFound("Resource %s not found in search path" % path)
return stream
+
+ def blockModelPaths(self):
+ for zf in self.zipFiles:
+ for name in zf.namelist():
+ if name.startswith("assets/minecraft/models/block"):
+ yield name
|
de4f43613b5f3a8b6f49ace6b8e9585a242d7cb2
|
src/build.py
|
src/build.py
|
# Copyright 2007 Pompeu Fabra University (Computational Imaging Laboratory), Barcelona, Spain. Web: www.cilab.upf.edu.
# This software is distributed WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# Script to automate CSnake calls from cruise control
import sys
import csnGUIHandler
import csnGUIOptions
import csnGenerator
# Check command line arguments
if len(sys.argv) != 3:
sys.exit("Error: not enough arguments. You need to provide an option and a configuration file.")
# Command line inputs
options_file = sys.argv[1]
config_file = sys.argv[2]
# Create GUI handler
handler = csnGUIHandler.Handler()
# Read options
options = csnGUIOptions.Options()
options.Load( options_file )
# Read settings
settings = csnGenerator.Settings()
settings.Load( config_file )
# Set the options
handler.SetOptions( options )
# Configure the project with the settings
if settings.instance == "thirdParty":
res = handler.ConfigureThirdPartyFolder(settings)
else:
res = handler.ConfigureProjectToBinFolder( settings, 1 )
sys.exit(res)
|
# Copyright 2007 Pompeu Fabra University (Computational Imaging Laboratory), Barcelona, Spain. Web: www.cilab.upf.edu.
# This software is distributed WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# Script to automate CSnake calls from cruise control
import sys
import csnGUIHandler
import csnGUIOptions
import csnGenerator
# Check command line arguments
if len(sys.argv) != 3:
sys.exit("Error: not enough arguments. You need to provide an option and a configuration file.")
# Command line inputs
options_file = sys.argv[1]
config_file = sys.argv[2]
# Create GUI handler
handler = csnGUIHandler.Handler()
# Read options
options = csnGUIOptions.Options()
options.Load( options_file )
# Read settings
settings = csnGenerator.Settings()
settings.Load( config_file )
# Set the options
handler.SetOptions( options )
# Configure the project with the settings
if settings.instance == "thirdParty":
res = handler.ConfigureThirdPartyFolder(settings)
else:
res = handler.ConfigureProjectToBinFolder( settings, 1 )
# exit with error if there was a problem
if res == false:
sys.exit(1)
|
Exit with the proper value.
|
Exit with the proper value.
git-svn-id: a26c1b3dc012bc7b166f1b96505d8277332098eb@265 9ffc3505-93cb-cd4b-9e5d-8a77f6415fcf
|
Python
|
bsd-3-clause
|
csnake-org/CSnake,csnake-org/CSnake,msteghofer/CSnake,msteghofer/CSnake,csnake-org/CSnake,msteghofer/CSnake
|
---
+++
@@ -32,5 +32,7 @@
res = handler.ConfigureThirdPartyFolder(settings)
else:
res = handler.ConfigureProjectToBinFolder( settings, 1 )
-sys.exit(res)
+# exit with error if there was a problem
+if res == false:
+ sys.exit(1)
|
55464daa00ca68b07737433b0983df4667432a9c
|
system/plugins/info.py
|
system/plugins/info.py
|
__author__ = 'Gareth Coles'
import weakref
class Info(object):
data = None
core = None
info = None
def __init__(self, yaml_data, plugin_object=None):
"""
:param yaml_data:
:type yaml_data: dict
:return:
"""
self.data = yaml_data
if plugin_object:
self._plugin_object = weakref.ref(plugin_object)
for key in yaml_data.keys():
obj = yaml_data[key]
if isinstance(obj, dict):
setattr(self, key, Info(obj))
else:
setattr(self, key, obj)
if self.core is not None:
self.name = self.core.name
self.module = self.core.module
if hasattr(self.core, "dependencies"):
self.dependencies = self.core.dependencies
else:
self.dependencies = []
if self.info is not None:
self.version = self.info.version
self.description = self.info.description
self.author = self.info.author
self.website = self.info.website
self.copyright = self.info.copyright
@property
def plugin_object(self):
if hasattr(self, "_plugin_object"):
return self._plugin_object()
return None
def get_module(self):
if hasattr(self, "module"):
return "plugins.%s" % self.module
return None
def set_plugin_object(self, obj):
self._plugin_object = weakref.ref(obj)
|
__author__ = 'Gareth Coles'
import weakref
class Info(object):
data = None
core = None
info = None
def __init__(self, yaml_data, plugin_object=None):
"""
:param yaml_data:
:type yaml_data: dict
:return:
"""
self.data = yaml_data
if plugin_object:
self._plugin_object = weakref.ref(plugin_object)
for key in yaml_data.keys():
obj = yaml_data[key]
if isinstance(obj, dict):
setattr(self, key, Info(obj))
else:
setattr(self, key, obj)
if self.core is not None:
self.name = self.core.name
self.module = self.core.module
if hasattr(self.core, "dependencies"):
self.dependencies = self.core.dependencies
else:
self.core.dependencies = []
self.dependencies = []
if self.info is not None:
self.version = self.info.version
self.description = self.info.description
self.author = self.info.author
self.website = self.info.website
self.copyright = self.info.copyright
@property
def plugin_object(self):
if hasattr(self, "_plugin_object"):
return self._plugin_object()
return None
def get_module(self):
if hasattr(self, "module"):
return "plugins.%s" % self.module
return None
def set_plugin_object(self, obj):
self._plugin_object = weakref.ref(obj)
|
Fix missing dependencies on core
|
Fix missing dependencies on core
|
Python
|
artistic-2.0
|
UltrosBot/Ultros,UltrosBot/Ultros
|
---
+++
@@ -37,6 +37,7 @@
if hasattr(self.core, "dependencies"):
self.dependencies = self.core.dependencies
else:
+ self.core.dependencies = []
self.dependencies = []
if self.info is not None:
|
985dd1ada1b2ad9ceaae111fa32b1d8e54b61786
|
mailqueue/tasks.py
|
mailqueue/tasks.py
|
from celery.task import task
from .models import MailerMessage
@task(name="tasks.send_mail")
def send_mail(pk):
message = MailerMessage.objects.get(pk=pk)
message._send()
@task()
def clear_sent_messages():
from mailqueue.models import MailerMessage
MailerMessage.objects.clear_sent_messages()
|
from celery.task import task
from .models import MailerMessage
@task(name="tasks.send_mail", default_retry_delay=5, max_retries=5)
def send_mail(pk):
message = MailerMessage.objects.get(pk=pk)
message._send()
# Retry when message is not sent
if not message.sent:
send_mail.retry([message.pk,])
@task()
def clear_sent_messages():
from mailqueue.models import MailerMessage
MailerMessage.objects.clear_sent_messages()
|
Add retry to celery task
|
Add retry to celery task
Messages do not always get delivered. Built in a retry when message is not sent.
Max retry count could also be a setting.
|
Python
|
mit
|
Goury/django-mail-queue,dstegelman/django-mail-queue,winfieldco/django-mail-queue,Goury/django-mail-queue,styrmis/django-mail-queue,dstegelman/django-mail-queue
|
---
+++
@@ -1,10 +1,14 @@
from celery.task import task
from .models import MailerMessage
-@task(name="tasks.send_mail")
+@task(name="tasks.send_mail", default_retry_delay=5, max_retries=5)
def send_mail(pk):
message = MailerMessage.objects.get(pk=pk)
message._send()
+
+ # Retry when message is not sent
+ if not message.sent:
+ send_mail.retry([message.pk,])
@task()
def clear_sent_messages():
|
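The commit message above notes that the maximum retry count could also be a setting. A hedged sketch of that variant — MAILQUEUE_MAX_RETRIES is a made-up setting name, and the task otherwise mirrors the record:

from celery.task import task
from django.conf import settings

from .models import MailerMessage

MAX_RETRIES = getattr(settings, 'MAILQUEUE_MAX_RETRIES', 5)

@task(name="tasks.send_mail", default_retry_delay=5, max_retries=MAX_RETRIES)
def send_mail(pk):
    message = MailerMessage.objects.get(pk=pk)
    message._send()
    # Retry while the message is still unsent, up to MAX_RETRIES times
    if not message.sent:
        send_mail.retry([message.pk])
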
8d8798554d996776eecc61b673adcbc2680f327a
|
mastermind/main.py
|
mastermind/main.py
|
from __future__ import (absolute_import, print_function, division)
from itertools import repeat
from mitmproxy.main import mitmdump
import os
from . import (cli, proxyswitch, say)
def main():
parser = cli.args()
args, extra_args = parser.parse_known_args()
try:
config = cli.config(args)
except IOError as err:
parser.error(err)
except toml.core.TomlError as err:
parser.error("Errors found in the config file:\n\n", err)
mitm_args = cli.mitm_args(config)
is_sudo = os.getuid() == 0
if type(mitm_args) == Exception:
parser.error(mitm_args.message)
say.level(config["core"]["verbose"])
try:
if config["os"]["proxy-settings"]:
if not is_sudo:
parser.error("proxy-settings is enabled, please provide sudo in order to change the OSX proxy configuration.")
proxyswitch.enable(config["core"]["host"],
str(config["core"]["port"]))
mitmdump(mitm_args + extra_args)
finally:
if config["os"]["proxy-settings"] and is_sudo:
proxyswitch.disable()
|
from __future__ import (absolute_import, print_function, division)
from itertools import repeat
from mitmproxy.main import mitmdump
import os
from . import (cli, proxyswitch, say)
def main():
parser = cli.args()
args, extra_args = parser.parse_known_args()
try:
config = cli.config(args)
except IOError as err:
parser.error(err)
except toml.core.TomlError as err:
parser.error("Errors found in the config file:\n\n", err)
mitm_args = cli.mitm_args(config)
is_sudo = os.getuid() == 0
if type(mitm_args) == Exception:
parser.error(mitm_args.message)
say.level(config["core"]["verbose"])
host= config["core"]["host"]
port = config["core"]["port"]
pid_filename = "/var/tmp/mastermind.{}{}.pid".format(host.replace('.', ''), port)
try:
if config["os"]["proxy-settings"]:
if not is_sudo:
parser.error("proxy-settings is enabled, please provide sudo in order to change the OSX proxy configuration.")
proxyswitch.enable(host, str(port))
with open(pid_filename, "w") as f:
f.write(str(os.getpid()))
mitmdump(mitm_args + extra_args)
finally:
os.remove(pid_filename)
if config["os"]["proxy-settings"] and is_sudo:
proxyswitch.disable()
|
Write PID to a file
|
Write PID to a file
|
Python
|
mit
|
ustwo/mastermind,ustwo/mastermind
|
---
+++
@@ -24,15 +24,23 @@
say.level(config["core"]["verbose"])
+ host= config["core"]["host"]
+ port = config["core"]["port"]
+ pid_filename = "/var/tmp/mastermind.{}{}.pid".format(host.replace('.', ''), port)
+
try:
if config["os"]["proxy-settings"]:
if not is_sudo:
parser.error("proxy-settings is enabled, please provide sudo in order to change the OSX proxy configuration.")
- proxyswitch.enable(config["core"]["host"],
- str(config["core"]["port"]))
+ proxyswitch.enable(host, str(port))
+
+ with open(pid_filename, "w") as f:
+ f.write(str(os.getpid()))
mitmdump(mitm_args + extra_args)
finally:
+ os.remove(pid_filename)
+
if config["os"]["proxy-settings"] and is_sudo:
proxyswitch.disable()
|
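The record above writes the PID file by hand and removes it in a finally block; the same bookkeeping can be packaged as a context manager so the cleanup cannot be skipped. A small sketch — pid_file is an invented helper, not part of mastermind:

import os
from contextlib import contextmanager

@contextmanager
def pid_file(path):
    # Write the current PID to `path` for the duration of the block.
    with open(path, 'w') as f:
        f.write(str(os.getpid()))
    try:
        yield path
    finally:
        os.remove(path)

# Usage:
with pid_file('/var/tmp/mastermind.example.pid'):
    pass  # run mitmdump here
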
4ef5d9ae7a571f97242cf2cc44e539d039486549
|
runserver.py
|
runserver.py
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
def main():
print "Launching kremlin in development mode."
print "--------------------------------------"
app.run(debug=True)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
def main():
print "Kremlin Magical Everything System v 0.0.0-None"
print "Copyright (c) Glasnost 2010-2011"
print "-----------------------------------------------"
print "RUNNING IN DEVELOPMENT MODE! ** NOT FOR PRODUCTION **"
print "Connect to http://127.0.0.1:5000 to access."
app.run(debug=True)
if __name__ == '__main__':
main()
|
Add proper banner to dev launch script
|
Add proper banner to dev launch script
|
Python
|
bsd-2-clause
|
glasnost/kremlin,glasnost/kremlin,glasnost/kremlin
|
---
+++
@@ -14,8 +14,11 @@
from kremlin import app
def main():
- print "Launching kremlin in development mode."
- print "--------------------------------------"
+ print "Kremlin Magical Everything System v 0.0.0-None"
+ print "Copyright (c) Glasnost 2010-2011"
+ print "-----------------------------------------------"
+ print "RUNNING IN DEVELOPMENT MODE! ** NOT FOR PRODUCTION **"
+ print "Connect to http://127.0.0.1:5000 to access."
app.run(debug=True)
if __name__ == '__main__':
|
928cbc47cb7430d8a2fef924b61179cd30f5ca34
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman
# Copyright (c) 2014 Ethan Zimmerman
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 1.0.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
tempfile_suffix = '-'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ethan Zimmerman
# Copyright (c) 2014 Ethan Zimmerman
#
# License: MIT
#
"""This module exports the RamlCop plugin class."""
from SublimeLinter.lint import NodeLinter
class RamlCop(NodeLinter):
"""Provides an interface to raml-cop."""
syntax = 'raml'
cmd = 'raml-cop --no-color'
version_requirement = '>= 1.0.0'
regex = (
r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] '
r'(?P<message>.+)'
)
line_col_base = (0, 0)
tempfile_suffix = '-'
|
Remove empty line before class docstring
|
Remove empty line before class docstring
|
Python
|
mit
|
thebinarypenguin/SublimeLinter-contrib-raml-cop
|
---
+++
@@ -14,7 +14,6 @@
class RamlCop(NodeLinter):
-
"""Provides an interface to raml-cop."""
syntax = 'raml'
|
9ee0f3f7be90046f796f3395b2149288a2b52a26
|
src/zeit/magazin/preview.py
|
src/zeit/magazin/preview.py
|
# Copyright (c) 2013 gocept gmbh & co. kg
# See also LICENSE.txt
import grokcore.component as grok
import zeit.cms.browser.preview
import zeit.magazin.interfaces
@grok.adapter(zeit.magazin.interfaces.IZMOContent, basestring)
@grok.implementer(zeit.cms.browser.interfaces.IPreviewURL)
def preview_url(content, preview_type):
return zeit.cms.browser.preview.prefixed_url(
'zmo-%s-prefix' % preview_type, content.uniqueId)
# XXX there also is a (basestring, basestring) variant of the adapter
# which is used by zeit.find to caluclate preview-urls for search results
# without looking up the content object first. What do we do about that?
|
# Copyright (c) 2013 gocept gmbh & co. kg
# See also LICENSE.txt
import grokcore.component as grok
import zeit.cms.browser.preview
import zeit.magazin.interfaces
@grok.adapter(zeit.magazin.interfaces.IZMOContent, basestring)
@grok.implementer(zeit.cms.browser.interfaces.IPreviewURL)
def preview_url(content, preview_type):
return zeit.cms.browser.preview.prefixed_url(
'zmo-%s-prefix' % preview_type, content.uniqueId)
|
Remove oddity marker, it's been resolved in zeit.find now
|
Remove oddity marker, it's been resolved in zeit.find now
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.magazin
|
---
+++
@@ -11,8 +11,3 @@
def preview_url(content, preview_type):
return zeit.cms.browser.preview.prefixed_url(
'zmo-%s-prefix' % preview_type, content.uniqueId)
-
-
-# XXX there also is a (basestring, basestring) variant of the adapter
-# which is used by zeit.find to caluclate preview-urls for search results
-# without looking up the content object first. What do we do about that?
|
ee0f28abd70396bf1e094592028aa693e5d6fe6c
|
rechunker/executors/python.py
|
rechunker/executors/python.py
|
import itertools
from functools import partial
import math
from typing import Any, Callable, Iterable
from rechunker.types import CopySpec, StagedCopySpec, Executor
Thunk = Callable[[], None]
class PythonExecutor(Executor[Thunk]):
"""An execution engine based on Python loops.
Supports copying between any arrays that implement ``__getitem__`` and
``__setitem__`` for tuples of ``slice`` objects.
Execution plans for PythonExecutor are functions that accept no arguments.
"""
def prepare_plan(self, specs: Iterable[StagedCopySpec]) -> Thunk:
tasks = []
for staged_copy_spec in specs:
for copy_spec in staged_copy_spec.stages:
tasks.append(partial(_direct_copy_array, copy_spec))
return partial(_execute_all, tasks)
def execute_plan(self, plan: Thunk):
plan()
def _direct_copy_array(copy_spec: CopySpec) -> None:
"""Direct copy between zarr arrays."""
source_array, target_array, chunks = copy_spec
shape = source_array.shape
ranges = [range(math.ceil(s / c)) for s, c in zip(shape, chunks)]
for indices in itertools.product(*ranges):
key = tuple(slice(c * i, c * (i + 1)) for i, c in zip(indices, chunks))
target_array[key] = source_array[key]
def _execute_all(tasks: Iterable[Callable[[], Any]]) -> None:
for task in tasks:
task()
|
import itertools
from functools import partial
import math
from typing import Callable, Iterable
from rechunker.types import CopySpec, StagedCopySpec, Executor
# PythonExecutor represents delayed execution tasks as functions that require
# no arguments.
Task = Callable[[], None]
class PythonExecutor(Executor[Task]):
"""An execution engine based on Python loops.
Supports copying between any arrays that implement ``__getitem__`` and
``__setitem__`` for tuples of ``slice`` objects.
Execution plans for PythonExecutor are functions that accept no arguments.
"""
def prepare_plan(self, specs: Iterable[StagedCopySpec]) -> Task:
tasks = []
for staged_copy_spec in specs:
for copy_spec in staged_copy_spec.stages:
tasks.append(partial(_direct_copy_array, copy_spec))
return partial(_execute_all, tasks)
def execute_plan(self, plan: Task):
plan()
def _direct_copy_array(copy_spec: CopySpec) -> None:
"""Direct copy between zarr arrays."""
source_array, target_array, chunks = copy_spec
shape = source_array.shape
ranges = [range(math.ceil(s / c)) for s, c in zip(shape, chunks)]
for indices in itertools.product(*ranges):
key = tuple(slice(c * i, c * (i + 1)) for i, c in zip(indices, chunks))
target_array[key] = source_array[key]
def _execute_all(tasks: Iterable[Task]) -> None:
for task in tasks:
task()
|
Remove 'thunk' jargon from PythonExecutor
|
Remove 'thunk' jargon from PythonExecutor
|
Python
|
mit
|
pangeo-data/rechunker
|
---
+++
@@ -2,15 +2,17 @@
from functools import partial
import math
-from typing import Any, Callable, Iterable
+from typing import Callable, Iterable
from rechunker.types import CopySpec, StagedCopySpec, Executor
-Thunk = Callable[[], None]
+# PythonExecutor represents delayed execution tasks as functions that require
+# no arguments.
+Task = Callable[[], None]
-class PythonExecutor(Executor[Thunk]):
+class PythonExecutor(Executor[Task]):
"""An execution engine based on Python loops.
Supports copying between any arrays that implement ``__getitem__`` and
@@ -19,14 +21,14 @@
Execution plans for PythonExecutor are functions that accept no arguments.
"""
- def prepare_plan(self, specs: Iterable[StagedCopySpec]) -> Thunk:
+ def prepare_plan(self, specs: Iterable[StagedCopySpec]) -> Task:
tasks = []
for staged_copy_spec in specs:
for copy_spec in staged_copy_spec.stages:
tasks.append(partial(_direct_copy_array, copy_spec))
return partial(_execute_all, tasks)
- def execute_plan(self, plan: Thunk):
+ def execute_plan(self, plan: Task):
plan()
@@ -40,6 +42,6 @@
target_array[key] = source_array[key]
-def _execute_all(tasks: Iterable[Callable[[], Any]]) -> None:
+def _execute_all(tasks: Iterable[Task]) -> None:
for task in tasks:
task()
|
01dc78bc4cea6c11744879c0f2066ab627314625
|
django_stackoverflow_trace/__init__.py
|
django_stackoverflow_trace/__init__.py
|
from django.views import debug
def _patch_django_debug_view():
new_data = """
<h3 style="margin-bottom:10px;">
<a href="http://stackoverflow.com/search?q=[python] or [django]+{{ exception_value|force_escape }}"
target="_blank">View in Stackoverflow</a>
</h3>
"""
replace_point = '<table class="meta">'
replacement = new_data + replace_point
# monkey patch the built-in template.
debug.TECHNICAL_500_TEMPLATE = debug.TECHNICAL_500_TEMPLATE.replace(
replace_point,
replacement,
1 # replace the first occurence
)
class DjangoStackoverTraceMiddleware(object):
def __init__(self):
_patch_django_debug_view()
def process_response(self, request, response):
return response
|
from django.views import debug
from django.conf import settings
def get_search_link():
default_choice = "stackoverflow"
search_urls = {
"stackoverflow": "http://stackoverflow.com/search?q=[python] or "
"[django]+{{ exception_value|force_escape }}",
"googlesearch": "https://www.google.com.tr/#q=site:stackoverflow.com"
"+django+{{ exception_value|force_escape }}"
}
search_url = getattr(
settings,
'DJANGO_STACKOVERFLOW_TRACE_SEARCH_SITE',
default_choice
)
return search_urls.get(search_url, search_urls[default_choice])
def _patch_django_debug_view():
new_data = """
<h3 style="margin-bottom:10px;">
<a href="%s"
target="_blank">View in Stackoverflow</a>
</h3>
""" % get_search_link()
replace_point = '<table class="meta">'
replacement = new_data + replace_point
# monkey patch the built-in template.
debug.TECHNICAL_500_TEMPLATE = debug.TECHNICAL_500_TEMPLATE.replace(
replace_point,
replacement,
1 # replace the first occurence
)
class DjangoStackoverTraceMiddleware(object):
def __init__(self):
_patch_django_debug_view()
def process_response(self, request, response):
return response
|
Add a google search option
|
Add a google search option
|
Python
|
mit
|
emre/django-stackoverflow-trace
|
---
+++
@@ -1,14 +1,34 @@
from django.views import debug
+from django.conf import settings
+
+
+def get_search_link():
+ default_choice = "stackoverflow"
+
+ search_urls = {
+ "stackoverflow": "http://stackoverflow.com/search?q=[python] or "
+ "[django]+{{ exception_value|force_escape }}",
+ "googlesearch": "https://www.google.com.tr/#q=site:stackoverflow.com"
+ "+django+{{ exception_value|force_escape }}"
+ }
+
+ search_url = getattr(
+ settings,
+ 'DJANGO_STACKOVERFLOW_TRACE_SEARCH_SITE',
+ default_choice
+ )
+
+ return search_urls.get(search_url, search_urls[default_choice])
def _patch_django_debug_view():
new_data = """
<h3 style="margin-bottom:10px;">
- <a href="http://stackoverflow.com/search?q=[python] or [django]+{{ exception_value|force_escape }}"
+ <a href="%s"
target="_blank">View in Stackoverflow</a>
</h3>
- """
+ """ % get_search_link()
replace_point = '<table class="meta">'
replacement = new_data + replace_point
|
ddc03637b19059f6fb06d72dc380afaf4fba57c2
|
indra/tests/test_context.py
|
indra/tests/test_context.py
|
from indra.databases import context_client
def test_get_protein_expression():
res = context_client.get_protein_expression('EGFR', 'BT20_BREAST')
assert(res is not None)
assert(res.get('EGFR') is not None)
assert(res['EGFR'].get('BT20_BREAST') is not None)
assert(res['EGFR']['BT20_BREAST'] > 1000)
def test_get_mutations():
res = context_client.get_mutations('BRAF', 'A375_SKIN')
assert(res is not None)
assert(res.get('BRAF') is not None)
assert(res['BRAF'].get('A375_SKIN') is not None)
assert(res['BRAF']['A375_SKIN'] == 1.0)
def test_send_request_bad():
res = context_client.send_request('xyz', None)
assert(res is None)
|
from indra.databases import context_client
def test_get_protein_expression():
res = context_client.get_protein_expression('EGFR', 'BT20_BREAST')
assert(res is not None)
assert(res.get('EGFR') is not None)
assert(res['EGFR'].get('BT20_BREAST') is not None)
assert(res['EGFR']['BT20_BREAST'] > 1000)
def test_get_mutations():
res = context_client.get_mutations('BRAF', 'A375_SKIN')
assert(res is not None)
assert(res.get('BRAF') is not None)
assert(res['BRAF'].get('A375_SKIN') is not None)
assert(res['BRAF']['A375_SKIN'] == 1.0)
|
Remove deprecated context client test
|
Remove deprecated context client test
|
Python
|
bsd-2-clause
|
johnbachman/belpy,sorgerlab/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,jmuhlich/indra,jmuhlich/indra,johnbachman/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,jmuhlich/indra,bgyori/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,bgyori/indra
|
---
+++
@@ -13,7 +13,3 @@
assert(res.get('BRAF') is not None)
assert(res['BRAF'].get('A375_SKIN') is not None)
assert(res['BRAF']['A375_SKIN'] == 1.0)
-
-def test_send_request_bad():
- res = context_client.send_request('xyz', None)
- assert(res is None)
|
a1f5a392d5270dd6f80a40e45c5e25b6ae04b7c3
|
embed_video/fields.py
|
embed_video/fields.py
|
from django.db import models
from django import forms
from django.utils.translation import ugettext_lazy as _
from .backends import detect_backend, UnknownIdException, \
UnknownBackendException
__all__ = ('EmbedVideoField', 'EmbedVideoFormField')
class EmbedVideoField(models.URLField):
"""
Model field for embeded video. Descendant of
:py:class:`django.db.models.URLField`.
"""
def formfield(self, **kwargs):
defaults = {'form_class': EmbedVideoFormField}
defaults.update(kwargs)
return super(EmbedVideoField, self).formfield(**defaults)
def south_field_triple(self):
from south.modelsinspector import introspector
cls_name = '%s.%s' % (
self.__class__.__module__,
self.__class__.__name__
)
args, kwargs = introspector(self)
return (cls_name, args, kwargs)
class EmbedVideoFormField(forms.URLField):
"""
Form field for embeded video. Descendant of
:py:class:`django.forms.URLField`
"""
def validate(self, url):
super(EmbedVideoFormField, self).validate(url)
if url:
try:
detect_backend(url)
except UnknownBackendException:
raise forms.ValidationError(_(u'URL could not be recognized.'))
except UnknownIdException:
raise forms.ValidationError(_(u'ID of this video could not be \
recognized.'))
return url
|
from django.db import models
from django import forms
from django.utils.translation import ugettext_lazy as _
from .backends import detect_backend, UnknownIdException, \
UnknownBackendException
__all__ = ('EmbedVideoField', 'EmbedVideoFormField')
class EmbedVideoField(models.URLField):
"""
Model field for embeded video. Descendant of
:py:class:`django.db.models.URLField`.
"""
def formfield(self, **kwargs):
defaults = {'form_class': EmbedVideoFormField}
defaults.update(kwargs)
return super(EmbedVideoField, self).formfield(**defaults)
def south_field_triple(self):
from south.modelsinspector import introspector
cls_name = '%s.%s' % (
self.__class__.__module__,
self.__class__.__name__
)
args, kwargs = introspector(self)
return (cls_name, args, kwargs)
class EmbedVideoFormField(forms.URLField):
"""
Form field for embeded video. Descendant of
:py:class:`django.forms.URLField`
"""
def validate(self, url):
# if empty url is not allowed throws an exception
super(EmbedVideoFormField, self).validate(url)
if not url:
return
try:
detect_backend(url)
except UnknownBackendException:
raise forms.ValidationError(_(u'URL could not be recognized.'))
except UnknownIdException:
raise forms.ValidationError(_(u'ID of this video could not be \
recognized.'))
return url
|
Simplify validate method in FormField.
|
Simplify validate method in FormField.
|
Python
|
mit
|
yetty/django-embed-video,jazzband/django-embed-video,jazzband/django-embed-video,mpachas/django-embed-video,yetty/django-embed-video,mpachas/django-embed-video
|
---
+++
@@ -36,15 +36,17 @@
"""
def validate(self, url):
+ # if empty url is not allowed throws an exception
super(EmbedVideoFormField, self).validate(url)
+
+ if not url:
+ return
- if url:
- try:
- detect_backend(url)
- except UnknownBackendException:
- raise forms.ValidationError(_(u'URL could not be recognized.'))
- except UnknownIdException:
- raise forms.ValidationError(_(u'ID of this video could not be \
- recognized.'))
-
+ try:
+ detect_backend(url)
+ except UnknownBackendException:
+ raise forms.ValidationError(_(u'URL could not be recognized.'))
+ except UnknownIdException:
+ raise forms.ValidationError(_(u'ID of this video could not be \
+ recognized.'))
return url
|
7f6167ef9f62b9b79e3c30b358c796caae69a2e6
|
PyWXSB/exceptions_.py
|
PyWXSB/exceptions_.py
|
"""Extensions of standard exceptions for PyWXSB events.
Yeah, I'd love this module to be named exceptions.py, but it can't
because the standard library has one of those, and we need to
reference it below.
"""
import exceptions
class PyWXSBException (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user should fix."""
pass
class SchemaValidationError (PyWXSBException):
"""Raised when the XML hierarchy does not appear to be valid for an XML schema."""
pass
class BadTypeValueError (PyWXSBException):
"""Raised when a value in an XML attribute does not conform to the simple type."""
pass
class NotInNamespaceError (PyWXSBException):
'''Raised when a name is referenced that is not defined in the appropriate namespace.'''
__namespace = None
__ncName = None
class BadPropertyError (PyWXSBException):
"""Raised when a schema component property is accessed on a component instance that does not define that property."""
pass
class PyWXSBError (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user probably can't fix."""
pass
class LogicError (PyWXSBError):
"""Raised when the code detects an implementation problem."""
class IncompleteImplementationError (LogicError):
"""Raised when a code branch is taken that has not yet been implemented."""
|
"""Extensions of standard exceptions for PyWXSB events.
Yeah, I'd love this module to be named exceptions.py, but it can't
because the standard library has one of those, and we need to
reference it below.
"""
import exceptions
class PyWXSBException (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user should fix."""
pass
class SchemaValidationError (PyWXSBException):
"""Raised when the XML hierarchy does not appear to be valid for an XML schema."""
pass
class BadTypeValueError (PyWXSBException):
"""Raised when a value in an XML attribute does not conform to the simple type."""
pass
class NotInNamespaceError (PyWXSBException):
'''Raised when a name is referenced that is not defined in the appropriate namespace.'''
__namespace = None
__ncName = None
class BadPropertyError (PyWXSBException):
"""Raised when a schema component property is accessed on a component instance that does not define that property."""
pass
class BadDocumentError (PyWXSBException):
"""Raised when processing document content and an error is encountered."""
pass
class PyWXSBError (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user probably can't fix."""
pass
class LogicError (PyWXSBError):
"""Raised when the code detects an implementation problem."""
class IncompleteImplementationError (LogicError):
"""Raised when a code branch is taken that has not yet been implemented."""
|
Add an exception to throw when a document does have the expected structure
|
Add an exception to throw when a document does have the expected structure
|
Python
|
apache-2.0
|
jonfoster/pyxb-upstream-mirror,jonfoster/pyxb-upstream-mirror,balanced/PyXB,jonfoster/pyxb2,pabigot/pyxb,CantemoInternal/pyxb,jonfoster/pyxb2,balanced/PyXB,CantemoInternal/pyxb,jonfoster/pyxb1,pabigot/pyxb,jonfoster/pyxb2,CantemoInternal/pyxb,jonfoster/pyxb-upstream-mirror,jonfoster/pyxb1,balanced/PyXB
|
---
+++
@@ -28,6 +28,10 @@
"""Raised when a schema component property is accessed on a component instance that does not define that property."""
pass
+class BadDocumentError (PyWXSBException):
+ """Raised when processing document content and an error is encountered."""
+ pass
+
class PyWXSBError (exceptions.Exception):
"""Base class for exceptions that indicate a problem that the user probably can't fix."""
pass
|
3bb0e65eac5c93fa6e331d22252fd7b17ecdf964
|
__main__.py
|
__main__.py
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from tensorflow.tensorboard.tensorboard import main
if __name__ == '__main__':
sys.exit(main())
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from tensorflow.tensorboard.tensorboard import main
if __name__ == '__main__':
sys.exit(main())
|
Add pylint indentation check to sanity and fix existing indentation Change: 132840696
|
Add pylint indentation check to sanity and fix existing indentation
Change: 132840696
|
Python
|
apache-2.0
|
francoisluus/tensorboard-supervise,qiuminxu/tensorboard,qiuminxu/tensorboard,tensorflow/tensorboard,shakedel/tensorboard,qiuminxu/tensorboard,ioeric/tensorboard,shakedel/tensorboard,qiuminxu/tensorboard,tensorflow/tensorboard,qiuminxu/tensorboard,francoisluus/tensorboard-supervise,agrubb/tensorboard,tensorflow/tensorboard,shakedel/tensorboard,ioeric/tensorboard,tensorflow/tensorboard,tensorflow/tensorboard,shakedel/tensorboard,francoisluus/tensorboard-supervise,agrubb/tensorboard,francoisluus/tensorboard-supervise,qiuminxu/tensorboard,tensorflow/tensorboard,ioeric/tensorboard,ioeric/tensorboard,agrubb/tensorboard,ioeric/tensorboard,ioeric/tensorboard,tensorflow/tensorboard,shakedel/tensorboard,agrubb/tensorboard,agrubb/tensorboard,francoisluus/tensorboard-supervise,francoisluus/tensorboard-supervise,shakedel/tensorboard,agrubb/tensorboard
|
---
+++
@@ -22,4 +22,4 @@
from tensorflow.tensorboard.tensorboard import main
if __name__ == '__main__':
- sys.exit(main())
+ sys.exit(main())
|
36298a9f9a7a373a716e44cac6226a0ec8c8c40c
|
__main__.py
|
__main__.py
|
from twisted.internet.endpoints import TCP4ServerEndpoint
from twisted.internet import reactor
import editorFactory
if __name__ == "__main__":
server = editorFactory.EditorFactory()
TCP4ServerEndpoint(reactor, 4567).listen(server)
reactor.run()
|
from twisted.internet.endpoints import TCP4ServerEndpoint
from twisted.internet import reactor
import editorFactory
if __name__ == "__main__":
server = editorFactory.EditorFactory()
TCP4ServerEndpoint(reactor, 4567).listen(server)
print('Starting up...')
reactor.run()
|
Print something at the start
|
Print something at the start
|
Python
|
apache-2.0
|
Floobits/floobits-emacs
|
---
+++
@@ -6,4 +6,5 @@
if __name__ == "__main__":
server = editorFactory.EditorFactory()
TCP4ServerEndpoint(reactor, 4567).listen(server)
+ print('Starting up...')
reactor.run()
|
602c01caa23df0c6dad5963412a340087012f692
|
thinc/tests/integration/test_shape_check.py
|
thinc/tests/integration/test_shape_check.py
|
import pytest
import numpy
from ...neural._classes.model import Model
def test_mismatched_shapes_raise_ShapeError():
X = numpy.ones((3, 4))
model = Model(10, 5)
with pytest.raises(ValueError):
y = model.begin_training(X)
|
import pytest
import numpy
from ...neural._classes.model import Model
from ...exceptions import UndefinedOperatorError, DifferentLengthError
from ...exceptions import ExpectedTypeError, ShapeMismatchError
def test_mismatched_shapes_raise_ShapeError():
X = numpy.ones((3, 4))
model = Model(10, 5)
with pytest.raises(ShapeMismatchError):
y = model.begin_training(X, X)
|
Update test and import errors
|
Update test and import errors
|
Python
|
mit
|
explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc
|
---
+++
@@ -2,12 +2,13 @@
import numpy
from ...neural._classes.model import Model
-
+from ...exceptions import UndefinedOperatorError, DifferentLengthError
+from ...exceptions import ExpectedTypeError, ShapeMismatchError
def test_mismatched_shapes_raise_ShapeError():
X = numpy.ones((3, 4))
model = Model(10, 5)
- with pytest.raises(ValueError):
- y = model.begin_training(X)
-
+ with pytest.raises(ShapeMismatchError):
+ y = model.begin_training(X, X)
+
|
3e413b9f0afea5e33f8698e13984fe5dcf4783dd
|
src/core/homepage_elements/about/hooks.py
|
src/core/homepage_elements/about/hooks.py
|
__copyright__ = "Copyright 2017 Birkbeck, University of London"
__author__ = "Martin Paul Eve & Andy Byers"
__license__ = "AGPL v3"
__maintainer__ = "Birkbeck Centre for Technology and Publishing"
from django.utils.translation import ugettext_lazy as _
from utils.setting_handler import get_plugin_setting
from core.homepage_elements.about import plugin_settings
def yield_homepage_element_context(request, homepage_elements):
if homepage_elements is not None and homepage_elements.filter(name='About').exists():
try:
title = get_plugin_setting(
plugin_settings.get_self(),
'about_title',
request.journal,
)
title_value = title.value if title.value else ''
except IndexError:
title_value = _('About this Journal')
return {
'about_content': request.journal.description,
'title_value': title_value,
}
else:
return {}
|
__copyright__ = "Copyright 2017 Birkbeck, University of London"
__author__ = "Martin Paul Eve & Andy Byers"
__license__ = "AGPL v3"
__maintainer__ = "Birkbeck Centre for Technology and Publishing"
from django.utils.translation import ugettext_lazy as _
from utils.setting_handler import get_plugin_setting
from core.homepage_elements.about import plugin_settings
def yield_homepage_element_context(request, homepage_elements):
if homepage_elements is not None and homepage_elements.filter(name='About').exists():
try:
title = get_plugin_setting(
plugin_settings.get_self(),
'about_title',
request.journal,
)
title_value = title.value if title.value else ''
except AttributeError:
title_value = _('About this Journal')
return {
'about_content': request.journal.description,
'title_value': title_value,
}
else:
return {}
|
Swap IndexError for AttributeError as a result of the swap from HVAD to MT
|
Swap IndexError for AttributeError as a result of the swap from HVAD to MT
|
Python
|
agpl-3.0
|
BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway
|
---
+++
@@ -19,7 +19,7 @@
request.journal,
)
title_value = title.value if title.value else ''
- except IndexError:
+ except AttributeError:
title_value = _('About this Journal')
return {
|
838012c457d6c963707bb16259cd72d28c231672
|
cellcounter/accounts/decorators.py
|
cellcounter/accounts/decorators.py
|
__author__ = 'jvc26'
|
from functools import wraps
from ratelimit.exceptions import Ratelimited
from ratelimit.helpers import is_ratelimited
def registration_ratelimit(ip=True, block=False, method=['POST'], field=None, rate='1/h',
skip_if=None, keys=None):
def decorator(fn):
@wraps(fn)
def _wrapped(request, *args, **kw):
request.limited = getattr(request, 'limited', False)
if skip_if is None or not skip_if(request):
ratelimited = is_ratelimited(request=request, increment=False,
ip=ip, method=method, field=field,
rate=rate, keys=keys)
if ratelimited and block:
raise Ratelimited()
return_val, success = fn(request, *args, **kw)
if success:
is_ratelimited(request=request, increment=True, ip=ip,
method=method, field=field, rate=rate, keys=keys)
return return_val
return _wrapped
return decorator
|
Use custom decorator to allow ratelimiting only on successful POST - prevents blocking form errors
|
Use custom decorator to allow ratelimiting only on successful POST - prevents blocking form errors
|
Python
|
mit
|
haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcounter
|
---
+++
@@ -1 +1,25 @@
-__author__ = 'jvc26'
+from functools import wraps
+
+from ratelimit.exceptions import Ratelimited
+from ratelimit.helpers import is_ratelimited
+
+
+def registration_ratelimit(ip=True, block=False, method=['POST'], field=None, rate='1/h',
+ skip_if=None, keys=None):
+ def decorator(fn):
+ @wraps(fn)
+ def _wrapped(request, *args, **kw):
+ request.limited = getattr(request, 'limited', False)
+ if skip_if is None or not skip_if(request):
+ ratelimited = is_ratelimited(request=request, increment=False,
+ ip=ip, method=method, field=field,
+ rate=rate, keys=keys)
+ if ratelimited and block:
+ raise Ratelimited()
+ return_val, success = fn(request, *args, **kw)
+ if success:
+ is_ratelimited(request=request, increment=True, ip=ip,
+ method=method, field=field, rate=rate, keys=keys)
+ return return_val
+ return _wrapped
+ return decorator
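As an aside, the decorator unpacks return_val, success = fn(request, *args, **kw) and only increments the rate-limit counter when success is true, so any view it wraps has to return a (response, success) pair. A hypothetical registration view, for illustration only (the URL name and template path are placeholders, not taken from this project):
from django.contrib.auth.forms import UserCreationForm
from django.shortcuts import redirect, render
from cellcounter.accounts.decorators import registration_ratelimit
@registration_ratelimit(block=True, rate='1/h')
def register(request):
    form = UserCreationForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()
        # Success: returning True lets the decorator count this request against the limit.
        return redirect('registration-complete'), True
    # Invalid submissions and plain GETs return False, so they never eat into the limit.
    return render(request, 'accounts/register.html', {'form': form}), False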
|
6ec3b50a087e68373f71162b3dd2421ce7655e4f
|
neuroimaging/testing/__init__.py
|
neuroimaging/testing/__init__.py
|
"""The testing directory contains a small set of imaging files to be used
for doctests only. More thorough tests and example data will be stored in
a nipy-data-suite to be created later and downloaded separately.
Examples
--------
>>> from neuroimaging.testing import funcfile
>>> from neuroimaging.core.image import image
>>> img = image.load(funcfile)
>>> img.shape
(20, 2, 20, 20)
Notes
-----
BUG: anatomical.nii.gz is a copy of functional.nii.gz. This is a place-holder
until we build a proper anatomical test image.
"""
import os
#__all__ = ['funcfile', 'anatfile']
# Discover directory path
filepath = os.path.abspath(__file__)
basedir = os.path.dirname(filepath)
funcfile = os.path.join(basedir, 'functional.nii.gz')
anatfile = os.path.join(basedir, 'anatomical.nii.gz')
from numpy.testing import *
import decorators as dec
|
"""The testing directory contains a small set of imaging files to be used
for doctests only. More thorough tests and example data will be stored in
a nipy-data-suite to be created later and downloaded separately.
Examples
--------
>>> from neuroimaging.testing import funcfile
>>> from neuroimaging.core.image import image
>>> img = image.load(funcfile)
>>> img.shape
(20, 2, 20, 20)
Notes
-----
BUG: anatomical.nii.gz is a copy of functional.nii.gz. This is a place-holder
until we build a proper anatomical test image.
"""
import os
#__all__ = ['funcfile', 'anatfile']
# Discover directory path
filepath = os.path.abspath(__file__)
basedir = os.path.dirname(filepath)
funcfile = os.path.join(basedir, 'functional.nii.gz')
anatfile = os.path.join(basedir, 'anatomical.nii.gz')
from numpy.testing import *
import decorators as dec
from nose.tools import assert_true, assert_false
|
Add some nose.tools to testing imports.
|
Add some nose.tools to testing imports.
|
Python
|
bsd-3-clause
|
alexis-roche/nipy,nipy/nipy-labs,bthirion/nipy,alexis-roche/niseg,arokem/nipy,alexis-roche/nipy,arokem/nipy,bthirion/nipy,nipy/nireg,nipy/nipy-labs,alexis-roche/niseg,nipy/nireg,alexis-roche/nipy,arokem/nipy,arokem/nipy,alexis-roche/register,alexis-roche/nireg,alexis-roche/register,bthirion/nipy,alexis-roche/register,alexis-roche/nipy,alexis-roche/nireg,bthirion/nipy
|
---
+++
@@ -31,3 +31,5 @@
from numpy.testing import *
import decorators as dec
+from nose.tools import assert_true, assert_false
+
|
bda420a0f9abd31b78decdc43359d0dcff36381f
|
zephyr/management/commands/dump_pointers.py
|
zephyr/management/commands/dump_pointers.py
|
from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile
import simplejson
def dump():
pointers = []
for u in UserProfile.objects.select_related("user__email").all():
pointers.append((u.user.email, u.pointer))
file("dumped-pointers", "w").write(simplejson.dumps(pointers) + "\n")
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
u = UserProfile.objects.get(user__email=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
u.save()
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--restore', default=False, action='store_true'),
make_option('--dry-run', '-n', default=False, action='store_true'),)
def handle(self, *args, **options):
if options["restore"]:
restore(change=not options['dry_run'])
else:
dump()
|
from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile
import simplejson
def dump():
pointers = []
for u in UserProfile.objects.select_related("user__email").all():
pointers.append((u.user.email, u.pointer))
file("dumped-pointers", "w").write(simplejson.dumps(pointers) + "\n")
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
u = UserProfile.objects.get(user__email__iexact=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
u.save()
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--restore', default=False, action='store_true'),
make_option('--dry-run', '-n', default=False, action='store_true'),)
def handle(self, *args, **options):
if options["restore"]:
restore(change=not options['dry_run'])
else:
dump()
|
Fix email case issues when restoring user pointers.
|
Fix email case issues when restoring user pointers.
(imported from commit 84d3288dffc1cb010d8cd2a749fe71aa2a4d0df3)
|
Python
|
apache-2.0
|
KingxBanana/zulip,zachallaun/zulip,themass/zulip,avastu/zulip,bastianh/zulip,bitemyapp/zulip,EasonYi/zulip,lfranchi/zulip,levixie/zulip,pradiptad/zulip,adnanh/zulip,hengqujushi/zulip,vaidap/zulip,ashwinirudrappa/zulip,reyha/zulip,brainwane/zulip,arpitpanwar/zulip,reyha/zulip,grave-w-grave/zulip,KJin99/zulip,Gabriel0402/zulip,yocome/zulip,Drooids/zulip,proliming/zulip,jessedhillon/zulip,mohsenSy/zulip,souravbadami/zulip,bssrdf/zulip,tbutter/zulip,hafeez3000/zulip,grave-w-grave/zulip,joyhchen/zulip,technicalpickles/zulip,mansilladev/zulip,bowlofstew/zulip,kokoar/zulip,zorojean/zulip,dawran6/zulip,kokoar/zulip,ApsOps/zulip,Qgap/zulip,hafeez3000/zulip,qq1012803704/zulip,Drooids/zulip,mahim97/zulip,voidException/zulip,kokoar/zulip,showell/zulip,EasonYi/zulip,johnny9/zulip,jerryge/zulip,hafeez3000/zulip,dxq-git/zulip,tbutter/zulip,tbutter/zulip,amanharitsh123/zulip,jphilipsen05/zulip,tiansiyuan/zulip,LeeRisk/zulip,reyha/zulip,brainwane/zulip,PhilSk/zulip,deer-hope/zulip,dwrpayne/zulip,synicalsyntax/zulip,willingc/zulip,jrowan/zulip,showell/zulip,KingxBanana/zulip,eastlhu/zulip,saitodisse/zulip,ashwinirudrappa/zulip,amyliu345/zulip,eastlhu/zulip,noroot/zulip,joshisa/zulip,christi3k/zulip,developerfm/zulip,natanovia/zulip,developerfm/zulip,wweiradio/zulip,armooo/zulip,hengqujushi/zulip,blaze225/zulip,ufosky-server/zulip,shaunstanislaus/zulip,dwrpayne/zulip,JanzTam/zulip,luyifan/zulip,susansls/zulip,dhcrzf/zulip,tdr130/zulip,aps-sids/zulip,hackerkid/zulip,jessedhillon/zulip,hackerkid/zulip,tommyip/zulip,atomic-labs/zulip,j831/zulip,ryanbackman/zulip,zhaoweigg/zulip,shubhamdhama/zulip,gkotian/zulip,rht/zulip,isht3/zulip,amyliu345/zulip,zachallaun/zulip,zulip/zulip,Drooids/zulip,ryansnowboarder/zulip,AZtheAsian/zulip,LeeRisk/zulip,proliming/zulip,pradiptad/zulip,adnanh/zulip,hj3938/zulip,kaiyuanheshang/zulip,sharmaeklavya2/zulip,littledogboy/zulip,sup95/zulip,tbutter/zulip,Vallher/zulip,luyifan/zulip,AZtheAsian/zulip,wangdeshui/zulip,he15his/zulip,itnihao/zulip,vabs22/zulip,Vallher/zulip,yuvipanda/zulip,saitodisse/zulip,RobotCaleb/zulip,adnanh/zulip,hayderimran7/zulip,fw1121/zulip,lfranchi/zulip,bluesea/zulip,ikasumiwt/zulip,Galexrt/zulip,karamcnair/zulip,m1ssou/zulip,jrowan/zulip,lfranchi/zulip,karamcnair/zulip,dawran6/zulip,ryanbackman/zulip,jackrzhang/zulip,zulip/zulip,joshisa/zulip,hj3938/zulip,levixie/zulip,krtkmj/zulip,aps-sids/zulip,bowlofstew/zulip,LeeRisk/zulip,hj3938/zulip,KJin99/zulip,eastlhu/zulip,Vallher/zulip,tiansiyuan/zulip,Drooids/zulip,ufosky-server/zulip,zulip/zulip,easyfmxu/zulip,calvinleenyc/zulip,shubhamdhama/zulip,dwrpayne/zulip,joyhchen/zulip,timabbott/zulip,jimmy54/zulip,timabbott/zulip,MayB/zulip,proliming/zulip,kaiyuanheshang/zulip,KJin99/zulip,wdaher/zulip,eastlhu/zulip,johnny9/zulip,MayB/zulip,Jianchun1/zulip,zacps/zulip,jimmy54/zulip,littledogboy/zulip,DazWorrall/zulip,xuxiao/zulip,saitodisse/zulip,alliejones/zulip,hafeez3000/zulip,synicalsyntax/zulip,nicholasbs/zulip,shrikrishnaholla/zulip,bssrdf/zulip,amanharitsh123/zulip,timabbott/zulip,swinghu/zulip,babbage/zulip,niftynei/zulip,jrowan/zulip,bastianh/zulip,he15his/zulip,developerfm/zulip,JPJPJPOPOP/zulip,schatt/zulip,JanzTam/zulip,mohsenSy/zulip,Qgap/zulip,gigawhitlocks/zulip,xuxiao/zulip,codeKonami/zulip,Frouk/zulip,hengqujushi/zulip,themass/zulip,jainayush975/zulip,developerfm/zulip,SmartPeople/zulip,aps-sids/zulip,ashwinirudrappa/zulip,tommyip/zulip,joyhchen/zulip,hengqujushi/zulip,wweiradio/zulip,blaze225/zulip,esander91/zulip,sharmaeklavya2/zulip,DazWorrall/zulip,ahmadassaf/zulip,vakila/zulip,proliming/zulip,willi
ngc/zulip,Qgap/zulip,Suninus/zulip,tbutter/zulip,krtkmj/zulip,tdr130/zulip,dwrpayne/zulip,Qgap/zulip,moria/zulip,alliejones/zulip,Gabriel0402/zulip,jessedhillon/zulip,Qgap/zulip,sharmaeklavya2/zulip,JPJPJPOPOP/zulip,kou/zulip,bastianh/zulip,Juanvulcano/zulip,krtkmj/zulip,kou/zulip,littledogboy/zulip,kokoar/zulip,hackerkid/zulip,hj3938/zulip,Cheppers/zulip,dawran6/zulip,zwily/zulip,natanovia/zulip,mansilladev/zulip,Qgap/zulip,wavelets/zulip,so0k/zulip,wdaher/zulip,karamcnair/zulip,technicalpickles/zulip,dawran6/zulip,Juanvulcano/zulip,ryansnowboarder/zulip,bssrdf/zulip,ipernet/zulip,peguin40/zulip,krtkmj/zulip,peiwei/zulip,samatdav/zulip,amanharitsh123/zulip,mdavid/zulip,mansilladev/zulip,willingc/zulip,peiwei/zulip,vaidap/zulip,xuxiao/zulip,avastu/zulip,joshisa/zulip,dhcrzf/zulip,tdr130/zulip,peguin40/zulip,xuanhan863/zulip,kou/zulip,ericzhou2008/zulip,verma-varsha/zulip,EasonYi/zulip,he15his/zulip,firstblade/zulip,tommyip/zulip,jerryge/zulip,vikas-parashar/zulip,bastianh/zulip,natanovia/zulip,brockwhittaker/zulip,swinghu/zulip,Cheppers/zulip,atomic-labs/zulip,shrikrishnaholla/zulip,udxxabp/zulip,fw1121/zulip,wavelets/zulip,dotcool/zulip,bowlofstew/zulip,zacps/zulip,qq1012803704/zulip,armooo/zulip,moria/zulip,sup95/zulip,jainayush975/zulip,itnihao/zulip,hafeez3000/zulip,wavelets/zulip,voidException/zulip,tommyip/zulip,jonesgithub/zulip,praveenaki/zulip,dwrpayne/zulip,xuanhan863/zulip,isht3/zulip,johnny9/zulip,andersk/zulip,cosmicAsymmetry/zulip,paxapy/zulip,suxinde2009/zulip,JPJPJPOPOP/zulip,vikas-parashar/zulip,suxinde2009/zulip,glovebx/zulip,showell/zulip,seapasulli/zulip,esander91/zulip,rishig/zulip,fw1121/zulip,joshisa/zulip,itnihao/zulip,jerryge/zulip,xuanhan863/zulip,stamhe/zulip,so0k/zulip,esander91/zulip,Galexrt/zulip,Jianchun1/zulip,noroot/zulip,TigorC/zulip,zorojean/zulip,stamhe/zulip,gkotian/zulip,johnnygaddarr/zulip,zulip/zulip,yocome/zulip,MariaFaBella85/zulip,codeKonami/zulip,codeKonami/zulip,dxq-git/zulip,hj3938/zulip,fw1121/zulip,dawran6/zulip,AZtheAsian/zulip,paxapy/zulip,synicalsyntax/zulip,PhilSk/zulip,hayderimran7/zulip,noroot/zulip,calvinleenyc/zulip,andersk/zulip,Drooids/zulip,ahmadassaf/zulip,Gabriel0402/zulip,bluesea/zulip,suxinde2009/zulip,mdavid/zulip,deer-hope/zulip,samatdav/zulip,shubhamdhama/zulip,jonesgithub/zulip,ikasumiwt/zulip,zhaoweigg/zulip,he15his/zulip,avastu/zulip,seapasulli/zulip,eeshangarg/zulip,lfranchi/zulip,noroot/zulip,KingxBanana/zulip,rht/zulip,sharmaeklavya2/zulip,arpith/zulip,dotcool/zulip,m1ssou/zulip,rishig/zulip,themass/zulip,LeeRisk/zulip,jrowan/zulip,karamcnair/zulip,hayderimran7/zulip,vabs22/zulip,zulip/zulip,brockwhittaker/zulip,thomasboyt/zulip,vakila/zulip,Batterfii/zulip,ApsOps/zulip,firstblade/zulip,amyliu345/zulip,sup95/zulip,udxxabp/zulip,souravbadami/zulip,ikasumiwt/zulip,joshisa/zulip,itnihao/zulip,glovebx/zulip,atomic-labs/zulip,johnny9/zulip,wangdeshui/zulip,dattatreya303/zulip,luyifan/zulip,EasonYi/zulip,peiwei/zulip,RobotCaleb/zulip,jimmy54/zulip,so0k/zulip,brockwhittaker/zulip,guiquanz/zulip,ApsOps/zulip,JanzTam/zulip,eeshangarg/zulip,lfranchi/zulip,shaunstanislaus/zulip,dotcool/zulip,Frouk/zulip,LAndreas/zulip,natanovia/zulip,calvinleenyc/zulip,gigawhitlocks/zulip,wweiradio/zulip,ericzhou2008/zulip,ipernet/zulip,Vallher/zulip,tiansiyuan/zulip,dnmfarrell/zulip,technicalpickles/zulip,verma-varsha/zulip,easyfmxu/zulip,timabbott/zulip,technicalpickles/zulip,eeshangarg/zulip,krtkmj/zulip,Jianchun1/zulip,isht3/zulip,johnnygaddarr/zulip,dhcrzf/zulip,calvinleenyc/zulip,dwrpayne/zulip,firstblade/zulip,zhaoweigg/zulip,niftynei/zulip
,huangkebo/zulip,Frouk/zulip,verma-varsha/zulip,RobotCaleb/zulip,jphilipsen05/zulip,souravbadami/zulip,niftynei/zulip,ipernet/zulip,synicalsyntax/zulip,schatt/zulip,rht/zulip,hafeez3000/zulip,punchagan/zulip,levixie/zulip,ipernet/zulip,samatdav/zulip,Galexrt/zulip,mahim97/zulip,luyifan/zulip,umkay/zulip,peguin40/zulip,arpitpanwar/zulip,gigawhitlocks/zulip,tdr130/zulip,Juanvulcano/zulip,showell/zulip,EasonYi/zulip,jeffcao/zulip,bluesea/zulip,amanharitsh123/zulip,bluesea/zulip,jeffcao/zulip,ikasumiwt/zulip,reyha/zulip,sharmaeklavya2/zulip,amyliu345/zulip,zwily/zulip,noroot/zulip,ahmadassaf/zulip,fw1121/zulip,JanzTam/zulip,bssrdf/zulip,voidException/zulip,Batterfii/zulip,dotcool/zulip,rishig/zulip,j831/zulip,natanovia/zulip,glovebx/zulip,brainwane/zulip,krtkmj/zulip,atomic-labs/zulip,andersk/zulip,dnmfarrell/zulip,bowlofstew/zulip,Suninus/zulip,dwrpayne/zulip,armooo/zulip,Gabriel0402/zulip,tiansiyuan/zulip,karamcnair/zulip,jerryge/zulip,sup95/zulip,peiwei/zulip,johnnygaddarr/zulip,qq1012803704/zulip,eastlhu/zulip,PaulPetring/zulip,hustlzp/zulip,ipernet/zulip,ikasumiwt/zulip,yocome/zulip,xuanhan863/zulip,j831/zulip,aakash-cr7/zulip,wweiradio/zulip,Drooids/zulip,LAndreas/zulip,ryansnowboarder/zulip,tdr130/zulip,natanovia/zulip,MariaFaBella85/zulip,suxinde2009/zulip,jainayush975/zulip,synicalsyntax/zulip,KJin99/zulip,arpith/zulip,verma-varsha/zulip,wweiradio/zulip,hengqujushi/zulip,wavelets/zulip,itnihao/zulip,amallia/zulip,karamcnair/zulip,shaunstanislaus/zulip,willingc/zulip,RobotCaleb/zulip,SmartPeople/zulip,samatdav/zulip,nicholasbs/zulip,m1ssou/zulip,bluesea/zulip,swinghu/zulip,huangkebo/zulip,jessedhillon/zulip,alliejones/zulip,Suninus/zulip,j831/zulip,akuseru/zulip,yuvipanda/zulip,arpitpanwar/zulip,bitemyapp/zulip,moria/zulip,lfranchi/zulip,kaiyuanheshang/zulip,AZtheAsian/zulip,so0k/zulip,dhcrzf/zulip,qq1012803704/zulip,umkay/zulip,ahmadassaf/zulip,JPJPJPOPOP/zulip,punchagan/zulip,glovebx/zulip,themass/zulip,zacps/zulip,timabbott/zulip,paxapy/zulip,jrowan/zulip,dotcool/zulip,ApsOps/zulip,LAndreas/zulip,noroot/zulip,avastu/zulip,Diptanshu8/zulip,zorojean/zulip,so0k/zulip,arpitpanwar/zulip,j831/zulip,MariaFaBella85/zulip,seapasulli/zulip,jimmy54/zulip,rht/zulip,stamhe/zulip,wdaher/zulip,eeshangarg/zulip,ryansnowboarder/zulip,bluesea/zulip,SmartPeople/zulip,Frouk/zulip,mdavid/zulip,paxapy/zulip,qq1012803704/zulip,shubhamdhama/zulip,PaulPetring/zulip,DazWorrall/zulip,Batterfii/zulip,andersk/zulip,babbage/zulip,umkay/zulip,hafeez3000/zulip,themass/zulip,Diptanshu8/zulip,krtkmj/zulip,aakash-cr7/zulip,johnny9/zulip,punchagan/zulip,cosmicAsymmetry/zulip,kaiyuanheshang/zulip,joyhchen/zulip,joyhchen/zulip,sonali0901/zulip,jphilipsen05/zulip,dnmfarrell/zulip,TigorC/zulip,dattatreya303/zulip,mansilladev/zulip,eeshangarg/zulip,shrikrishnaholla/zulip,moria/zulip,guiquanz/zulip,zorojean/zulip,bowlofstew/zulip,Jianchun1/zulip,KingxBanana/zulip,zofuthan/zulip,Cheppers/zulip,ipernet/zulip,zorojean/zulip,armooo/zulip,littledogboy/zulip,yuvipanda/zulip,vabs22/zulip,alliejones/zulip,ikasumiwt/zulip,firstblade/zulip,guiquanz/zulip,bowlofstew/zulip,yocome/zulip,Juanvulcano/zulip,yocome/zulip,wavelets/zulip,SmartPeople/zulip,akuseru/zulip,bluesea/zulip,huangkebo/zulip,dnmfarrell/zulip,showell/zulip,jimmy54/zulip,zachallaun/zulip,mohsenSy/zulip,ericzhou2008/zulip,brockwhittaker/zulip,praveenaki/zulip,gkotian/zulip,fw1121/zulip,TigorC/zulip,codeKonami/zulip,kou/zulip,dattatreya303/zulip,zwily/zulip,ericzhou2008/zulip,stamhe/zulip,mohsenSy/zulip,udxxabp/zulip,Diptanshu8/zulip,luyifan/zulip,showell/zulip,zorojean/zul
ip,Suninus/zulip,TigorC/zulip,arpitpanwar/zulip,isht3/zulip,xuxiao/zulip,Vallher/zulip,xuanhan863/zulip,zorojean/zulip,saitodisse/zulip,JPJPJPOPOP/zulip,jonesgithub/zulip,adnanh/zulip,proliming/zulip,jonesgithub/zulip,souravbadami/zulip,udxxabp/zulip,guiquanz/zulip,AZtheAsian/zulip,zhaoweigg/zulip,zwily/zulip,AZtheAsian/zulip,hackerkid/zulip,amanharitsh123/zulip,babbage/zulip,bitemyapp/zulip,saitodisse/zulip,cosmicAsymmetry/zulip,Juanvulcano/zulip,voidException/zulip,deer-hope/zulip,huangkebo/zulip,dxq-git/zulip,vaidap/zulip,Galexrt/zulip,mahim97/zulip,wavelets/zulip,cosmicAsymmetry/zulip,dxq-git/zulip,jeffcao/zulip,proliming/zulip,esander91/zulip,PaulPetring/zulip,kaiyuanheshang/zulip,adnanh/zulip,guiquanz/zulip,blaze225/zulip,huangkebo/zulip,eastlhu/zulip,Vallher/zulip,paxapy/zulip,huangkebo/zulip,avastu/zulip,nicholasbs/zulip,jackrzhang/zulip,Diptanshu8/zulip,vabs22/zulip,adnanh/zulip,suxinde2009/zulip,vakila/zulip,bitemyapp/zulip,deer-hope/zulip,vabs22/zulip,Cheppers/zulip,amallia/zulip,m1ssou/zulip,rishig/zulip,voidException/zulip,ApsOps/zulip,RobotCaleb/zulip,alliejones/zulip,PaulPetring/zulip,dxq-git/zulip,MayB/zulip,samatdav/zulip,samatdav/zulip,brainwane/zulip,DazWorrall/zulip,bastianh/zulip,cosmicAsymmetry/zulip,aakash-cr7/zulip,schatt/zulip,hj3938/zulip,m1ssou/zulip,Frouk/zulip,xuxiao/zulip,ahmadassaf/zulip,punchagan/zulip,seapasulli/zulip,JanzTam/zulip,amyliu345/zulip,zacps/zulip,joshisa/zulip,glovebx/zulip,saitodisse/zulip,tdr130/zulip,ericzhou2008/zulip,ryanbackman/zulip,moria/zulip,RobotCaleb/zulip,Cheppers/zulip,umkay/zulip,brockwhittaker/zulip,christi3k/zulip,technicalpickles/zulip,brainwane/zulip,natanovia/zulip,rht/zulip,peiwei/zulip,developerfm/zulip,brainwane/zulip,aps-sids/zulip,calvinleenyc/zulip,ashwinirudrappa/zulip,LeeRisk/zulip,aliceriot/zulip,jonesgithub/zulip,wangdeshui/zulip,dhcrzf/zulip,sup95/zulip,Drooids/zulip,ericzhou2008/zulip,johnny9/zulip,xuxiao/zulip,shaunstanislaus/zulip,PhilSk/zulip,vikas-parashar/zulip,praveenaki/zulip,PhilSk/zulip,esander91/zulip,jeffcao/zulip,hustlzp/zulip,ipernet/zulip,dattatreya303/zulip,susansls/zulip,bastianh/zulip,umkay/zulip,synicalsyntax/zulip,schatt/zulip,wavelets/zulip,johnnygaddarr/zulip,jimmy54/zulip,pradiptad/zulip,akuseru/zulip,sonali0901/zulip,niftynei/zulip,levixie/zulip,mahim97/zulip,susansls/zulip,Suninus/zulip,mansilladev/zulip,hengqujushi/zulip,tommyip/zulip,EasonYi/zulip,suxinde2009/zulip,JanzTam/zulip,aliceriot/zulip,zacps/zulip,so0k/zulip,yuvipanda/zulip,Suninus/zulip,akuseru/zulip,zhaoweigg/zulip,johnnygaddarr/zulip,kokoar/zulip,ashwinirudrappa/zulip,gkotian/zulip,shaunstanislaus/zulip,dawran6/zulip,peguin40/zulip,thomasboyt/zulip,so0k/zulip,aps-sids/zulip,amallia/zulip,KingxBanana/zulip,dxq-git/zulip,wdaher/zulip,sonali0901/zulip,itnihao/zulip,technicalpickles/zulip,akuseru/zulip,mansilladev/zulip,zhaoweigg/zulip,zofuthan/zulip,bssrdf/zulip,kokoar/zulip,willingc/zulip,arpitpanwar/zulip,hustlzp/zulip,sonali0901/zulip,aakash-cr7/zulip,vabs22/zulip,verma-varsha/zulip,arpith/zulip,avastu/zulip,wdaher/zulip,shrikrishnaholla/zulip,schatt/zulip,hj3938/zulip,deer-hope/zulip,schatt/zulip,mahim97/zulip,alliejones/zulip,hackerkid/zulip,littledogboy/zulip,udxxabp/zulip,bssrdf/zulip,rishig/zulip,swinghu/zulip,isht3/zulip,gkotian/zulip,dxq-git/zulip,christi3k/zulip,wangdeshui/zulip,arpith/zulip,EasonYi/zulip,PhilSk/zulip,suxinde2009/zulip,joshisa/zulip,jphilipsen05/zulip,grave-w-grave/zulip,babbage/zulip,luyifan/zulip,ryanbackman/zulip,wdaher/zulip,niftynei/zulip,shubhamdhama/zulip,ufosky-server/zulip,swinghu/zulip,codeKo
nami/zulip,aliceriot/zulip,amallia/zulip,gkotian/zulip,proliming/zulip,dhcrzf/zulip,Suninus/zulip,KingxBanana/zulip,deer-hope/zulip,levixie/zulip,wangdeshui/zulip,qq1012803704/zulip,jerryge/zulip,jackrzhang/zulip,jainayush975/zulip,avastu/zulip,zhaoweigg/zulip,LeeRisk/zulip,shrikrishnaholla/zulip,Batterfii/zulip,he15his/zulip,thomasboyt/zulip,levixie/zulip,timabbott/zulip,dattatreya303/zulip,peguin40/zulip,shaunstanislaus/zulip,punchagan/zulip,Jianchun1/zulip,jessedhillon/zulip,m1ssou/zulip,easyfmxu/zulip,voidException/zulip,easyfmxu/zulip,pradiptad/zulip,ashwinirudrappa/zulip,Cheppers/zulip,hustlzp/zulip,reyha/zulip,peiwei/zulip,isht3/zulip,punchagan/zulip,mdavid/zulip,zofuthan/zulip,hayderimran7/zulip,dnmfarrell/zulip,willingc/zulip,vaidap/zulip,arpith/zulip,guiquanz/zulip,amyliu345/zulip,aliceriot/zulip,akuseru/zulip,bitemyapp/zulip,souravbadami/zulip,easyfmxu/zulip,Jianchun1/zulip,glovebx/zulip,yuvipanda/zulip,jainayush975/zulip,jessedhillon/zulip,atomic-labs/zulip,cosmicAsymmetry/zulip,zachallaun/zulip,johnnygaddarr/zulip,xuanhan863/zulip,Juanvulcano/zulip,brainwane/zulip,itnihao/zulip,gigawhitlocks/zulip,zachallaun/zulip,jackrzhang/zulip,armooo/zulip,calvinleenyc/zulip,schatt/zulip,andersk/zulip,jessedhillon/zulip,hayderimran7/zulip,Batterfii/zulip,ufosky-server/zulip,noroot/zulip,reyha/zulip,littledogboy/zulip,luyifan/zulip,lfranchi/zulip,armooo/zulip,ufosky-server/zulip,RobotCaleb/zulip,willingc/zulip,ikasumiwt/zulip,bssrdf/zulip,MariaFaBella85/zulip,thomasboyt/zulip,yocome/zulip,blaze225/zulip,nicholasbs/zulip,hustlzp/zulip,mohsenSy/zulip,jainayush975/zulip,firstblade/zulip,umkay/zulip,jeffcao/zulip,arpitpanwar/zulip,ApsOps/zulip,LAndreas/zulip,tbutter/zulip,codeKonami/zulip,ryansnowboarder/zulip,ashwinirudrappa/zulip,Diptanshu8/zulip,nicholasbs/zulip,pradiptad/zulip,developerfm/zulip,he15his/zulip,zachallaun/zulip,he15his/zulip,yocome/zulip,praveenaki/zulip,rishig/zulip,mahim97/zulip,grave-w-grave/zulip,xuanhan863/zulip,rishig/zulip,shubhamdhama/zulip,ryansnowboarder/zulip,Frouk/zulip,shrikrishnaholla/zulip,mohsenSy/zulip,nicholasbs/zulip,jerryge/zulip,jonesgithub/zulip,vakila/zulip,vikas-parashar/zulip,vakila/zulip,yuvipanda/zulip,esander91/zulip,dnmfarrell/zulip,jonesgithub/zulip,KJin99/zulip,jimmy54/zulip,tiansiyuan/zulip,udxxabp/zulip,thomasboyt/zulip,atomic-labs/zulip,tommyip/zulip,aakash-cr7/zulip,DazWorrall/zulip,PaulPetring/zulip,Diptanshu8/zulip,rht/zulip,dnmfarrell/zulip,kaiyuanheshang/zulip,MayB/zulip,levixie/zulip,zwily/zulip,jeffcao/zulip,MayB/zulip,karamcnair/zulip,TigorC/zulip,aliceriot/zulip,paxapy/zulip,timabbott/zulip,gigawhitlocks/zulip,dhcrzf/zulip,thomasboyt/zulip,vakila/zulip,johnnygaddarr/zulip,ryansnowboarder/zulip,MariaFaBella85/zulip,joyhchen/zulip,akuseru/zulip,kaiyuanheshang/zulip,jphilipsen05/zulip,LAndreas/zulip,stamhe/zulip,blaze225/zulip,jackrzhang/zulip,amallia/zulip,wweiradio/zulip,brockwhittaker/zulip,wdaher/zulip,deer-hope/zulip,easyfmxu/zulip,Qgap/zulip,codeKonami/zulip,mdavid/zulip,ufosky-server/zulip,souravbadami/zulip,amallia/zulip,huangkebo/zulip,tbutter/zulip,zofuthan/zulip,hustlzp/zulip,dotcool/zulip,jackrzhang/zulip,vaidap/zulip,PhilSk/zulip,pradiptad/zulip,christi3k/zulip,zwily/zulip,zulip/zulip,technicalpickles/zulip,seapasulli/zulip,qq1012803704/zulip,Cheppers/zulip,gigawhitlocks/zulip,JanzTam/zulip,mansilladev/zulip,arpith/zulip,developerfm/zulip,littledogboy/zulip,rht/zulip,voidException/zulip,PaulPetring/zulip,tiansiyuan/zulip,susansls/zulip,vikas-parashar/zulip,christi3k/zulip,punchagan/zulip,nicholasbs/zulip,verma-varsha/zulip,so
nali0901/zulip,zacps/zulip,DazWorrall/zulip,MariaFaBella85/zulip,babbage/zulip,tommyip/zulip,vakila/zulip,aps-sids/zulip,Galexrt/zulip,xuxiao/zulip,hayderimran7/zulip,moria/zulip,kokoar/zulip,wangdeshui/zulip,udxxabp/zulip,bowlofstew/zulip,amanharitsh123/zulip,grave-w-grave/zulip,vaidap/zulip,blaze225/zulip,hayderimran7/zulip,JPJPJPOPOP/zulip,glovebx/zulip,synicalsyntax/zulip,shubhamdhama/zulip,zofuthan/zulip,atomic-labs/zulip,DazWorrall/zulip,Galexrt/zulip,LAndreas/zulip,gkotian/zulip,alliejones/zulip,Galexrt/zulip,MayB/zulip,kou/zulip,tiansiyuan/zulip,kou/zulip,eeshangarg/zulip,seapasulli/zulip,guiquanz/zulip,pradiptad/zulip,mdavid/zulip,Vallher/zulip,hengqujushi/zulip,adnanh/zulip,aps-sids/zulip,zofuthan/zulip,praveenaki/zulip,j831/zulip,hackerkid/zulip,hustlzp/zulip,babbage/zulip,esander91/zulip,Batterfii/zulip,themass/zulip,jeffcao/zulip,firstblade/zulip,LAndreas/zulip,ahmadassaf/zulip,aliceriot/zulip,MayB/zulip,praveenaki/zulip,andersk/zulip,ryanbackman/zulip,christi3k/zulip,susansls/zulip,PaulPetring/zulip,dattatreya303/zulip,moria/zulip,tdr130/zulip,dotcool/zulip,babbage/zulip,ahmadassaf/zulip,zwily/zulip,susansls/zulip,armooo/zulip,praveenaki/zulip,ryanbackman/zulip,mdavid/zulip,gigawhitlocks/zulip,jackrzhang/zulip,yuvipanda/zulip,zofuthan/zulip,Gabriel0402/zulip,hackerkid/zulip,vikas-parashar/zulip,SmartPeople/zulip,themass/zulip,wangdeshui/zulip,stamhe/zulip,stamhe/zulip,wweiradio/zulip,bastianh/zulip,Batterfii/zulip,sup95/zulip,ericzhou2008/zulip,KJin99/zulip,shrikrishnaholla/zulip,bitemyapp/zulip,swinghu/zulip,johnny9/zulip,easyfmxu/zulip,Gabriel0402/zulip,seapasulli/zulip,aakash-cr7/zulip,KJin99/zulip,saitodisse/zulip,showell/zulip,niftynei/zulip,Gabriel0402/zulip,jphilipsen05/zulip,jrowan/zulip,fw1121/zulip,sonali0901/zulip,TigorC/zulip,MariaFaBella85/zulip,ufosky-server/zulip,jerryge/zulip,eastlhu/zulip,zachallaun/zulip,peguin40/zulip,LeeRisk/zulip,m1ssou/zulip,grave-w-grave/zulip,sharmaeklavya2/zulip,umkay/zulip,ApsOps/zulip,andersk/zulip,firstblade/zulip,SmartPeople/zulip,zulip/zulip,Frouk/zulip,eeshangarg/zulip,aliceriot/zulip,kou/zulip,amallia/zulip,peiwei/zulip,swinghu/zulip,shaunstanislaus/zulip,thomasboyt/zulip,bitemyapp/zulip
|
---
+++
@@ -11,7 +11,7 @@
def restore(change):
for (email, pointer) in simplejson.loads(file("dumped-pointers").read()):
- u = UserProfile.objects.get(user__email=email)
+ u = UserProfile.objects.get(user__email__iexact=email)
print "%s: pointer %s => %s" % (email, u.pointer, pointer)
if change:
u.pointer = pointer
|
4742f587e3e66fd1916dcb7200517e2ac06ddcf4
|
uconnrcmpy/__init__.py
|
uconnrcmpy/__init__.py
|
from .ignitiondelayexp import ExperimentalIgnitionDelay
from .compare_to_sim import CompareToSimulation
from .volume_trace import VolumeTraceBuilder
from .nonreactive import NonReactiveExperiments
__all__ = [
'ExperimentalIgnitionDelay',
'CompareToSimulation',
'VolumeTraceBuilder',
'NonReactiveExperiments',
]
|
import sys
if sys.version_info[0] < 3 and sys.version_info[1] < 4:
raise Exception('Python 3.4 is required to use this package.')
from .ignitiondelayexp import ExperimentalIgnitionDelay
from .compare_to_sim import CompareToSimulation
from .volume_trace import VolumeTraceBuilder
from .nonreactive import NonReactiveExperiments
__all__ = [
'ExperimentalIgnitionDelay',
'CompareToSimulation',
'VolumeTraceBuilder',
'NonReactiveExperiments',
]
|
Enforce Python >= 3.4 on import of the package
|
Enforce Python >= 3.4 on import of the package
Python 3.4 is required for the pathlib module
|
Python
|
bsd-3-clause
|
bryanwweber/UConnRCMPy
|
---
+++
@@ -1,3 +1,7 @@
+import sys
+if sys.version_info[0] < 3 and sys.version_info[1] < 4:
+ raise Exception('Python 3.4 is required to use this package.')
+
from .ignitiondelayexp import ExperimentalIgnitionDelay
from .compare_to_sim import CompareToSimulation
from .volume_trace import VolumeTraceBuilder
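As an aside, the guard above only fires when the major version is below 3 and the minor version is below 4 at the same time, so interpreters such as Python 2.7 or 3.3 slip through it. A version gate like this is usually written as a tuple comparison; a minimal sketch (using ImportError instead of a bare Exception is just one reasonable choice):
import sys
# A tuple comparison covers both the 2.x and the 3.0-3.3 cases in a single check.
if sys.version_info < (3, 4):
    raise ImportError('Python 3.4 or newer is required to use this package.')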
|
77cf2fb0f63a5520de3b8b3456ce4c9181b91d16
|
spacy/tests/regression/test_issue595.py
|
spacy/tests/regression/test_issue595.py
|
from __future__ import unicode_literals
import pytest
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab, lemmatizer):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
|
from __future__ import unicode_literals
import pytest
from ...symbols import POS, VERB, VerbForm_inf
from ...tokens import Doc
from ...vocab import Vocab
from ...lemmatizer import Lemmatizer
@pytest.fixture
def index():
return {'verb': {}}
@pytest.fixture
def exceptions():
return {'verb': {}}
@pytest.fixture
def rules():
return {"verb": [["ed", "e"]]}
@pytest.fixture
def lemmatizer(index, exceptions, rules):
return Lemmatizer(index, exceptions, rules)
@pytest.fixture
def tag_map():
return {'VB': {POS: VERB, 'morph': VerbForm_inf}}
@pytest.fixture
def vocab(lemmatizer, tag_map):
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
def test_not_lemmatize_base_forms(vocab):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
assert feed.text == u'feed'
assert feed.lemma_ == u'feed'
|
Remove unnecessary argument in test
|
Remove unnecessary argument in test
|
Python
|
mit
|
oroszgy/spaCy.hu,honnibal/spaCy,banglakit/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,raphael0202/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,honnibal/spaCy,banglakit/spaCy,banglakit/spaCy,spacy-io/spaCy,raphael0202/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,banglakit/spaCy,aikramer2/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,explosion/spaCy,oroszgy/spaCy.hu,recognai/spaCy,banglakit/spaCy,raphael0202/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,aikramer2/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy
|
---
+++
@@ -34,7 +34,7 @@
return Vocab(lemmatizer=lemmatizer, tag_map=tag_map)
-def test_not_lemmatize_base_forms(vocab, lemmatizer):
+def test_not_lemmatize_base_forms(vocab):
doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"])
feed = doc[2]
feed.tag_ = u'VB'
|
bca3c8f7b2c12b86e0d200009d23201bdc05d716
|
make_spectra.py
|
make_spectra.py
|
# -*- coding: utf-8 -*-
import randspectra as rs
import sys
import os.path as path
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base=path.expanduser("~/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n512")
halo = rs.RandSpectra(snapnum, base)
halo.get_observer_tau("Si",2)
halo.get_col_density("H",1)
#halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
|
# -*- coding: utf-8 -*-
import randspectra as rs
import sys
import os.path as path
snapnum=sys.argv[1]
sim=sys.argv[2]
#base="/n/hernquistfs1/mvogelsberger/projects/GFM/Production/Cosmo/Cosmo"+str(sim)+"_V6/L25n512/output/"
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base=path.expanduser("~/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n512")
halo = rs.RandSpectra(snapnum, base)
halo.save_file()
halo.get_observer_tau("Si",2)
halo.get_col_density("H",1)
#halo.get_tau("H",1,1)
halo.get_col_density("Z",-1)
halo.get_col_density("H",-1)
halo.save_file()
|
Handle the case where the savefile already exists by moving it out of the way
|
Handle the case where the savefile already exists by moving it out of the way
|
Python
|
mit
|
sbird/vw_spectra
|
---
+++
@@ -9,6 +9,7 @@
#savedir="/n/home11/spb/scratch/Cosmo/Cosmo"+str(sim)+"_V6_512/snapdir_"+str(snapnum).rjust(3,'0')
base=path.expanduser("~/data/Cosmo/Cosmo"+str(sim)+"_V6/L25n512")
halo = rs.RandSpectra(snapnum, base)
+halo.save_file()
halo.get_observer_tau("Si",2)
halo.get_col_density("H",1)
#halo.get_tau("H",1,1)
|
bed7f5c80f6c5b5ce9b9a17aea5c9eadd047ee47
|
mfr/conftest.py
|
mfr/conftest.py
|
"""Project-wide test configuration, including fixutres that can be
used by any module.
Example test: ::
def test_my_renderer(fakefile):
assert my_renderer(fakefile) == '..expected result..'
"""
import io
import pytest
@pytest.fixture
def fakefile():
return io.BytesIO(b'foo')
|
"""Project-wide test configuration, including fixutres that can be
used by any module.
Example test: ::
def test_my_renderer(fakefile):
assert my_renderer(fakefile) == '..expected result..'
"""
import pytest
import mock
@pytest.fixture
def fakefile():
"""A simple file-like object."""
return mock.Mock(spec=file)
|
Make fakefile a mock instead of an io object
|
Make fakefile a mock instead of an io object
Makes it possible to mutate attributes, e.g. the name,
for tests
|
Python
|
apache-2.0
|
felliott/modular-file-renderer,CenterForOpenScience/modular-file-renderer,mfraezz/modular-file-renderer,CenterForOpenScience/modular-file-renderer,Johnetordoff/modular-file-renderer,mfraezz/modular-file-renderer,icereval/modular-file-renderer,icereval/modular-file-renderer,AddisonSchiller/modular-file-renderer,chrisseto/modular-file-renderer,haoyuchen1992/modular-file-renderer,erinspace/modular-file-renderer,rdhyee/modular-file-renderer,TomBaxter/modular-file-renderer,rdhyee/modular-file-renderer,CenterForOpenScience/modular-file-renderer,Johnetordoff/modular-file-renderer,felliott/modular-file-renderer,erinspace/modular-file-renderer,chrisseto/modular-file-renderer,felliott/modular-file-renderer,felliott/modular-file-renderer,rdhyee/modular-file-renderer,haoyuchen1992/modular-file-renderer,chrisseto/modular-file-renderer,CenterForOpenScience/modular-file-renderer,mfraezz/modular-file-renderer,AddisonSchiller/modular-file-renderer,haoyuchen1992/modular-file-renderer,haoyuchen1992/modular-file-renderer,mfraezz/modular-file-renderer,TomBaxter/modular-file-renderer,rdhyee/modular-file-renderer,erinspace/modular-file-renderer,TomBaxter/modular-file-renderer,icereval/modular-file-renderer,AddisonSchiller/modular-file-renderer,Johnetordoff/modular-file-renderer,TomBaxter/modular-file-renderer,AddisonSchiller/modular-file-renderer,Johnetordoff/modular-file-renderer
|
---
+++
@@ -7,9 +7,10 @@
assert my_renderer(fakefile) == '..expected result..'
"""
-import io
import pytest
+import mock
@pytest.fixture
def fakefile():
- return io.BytesIO(b'foo')
+ """A simple file-like object."""
+ return mock.Mock(spec=file)
|
3770095f087309efe901c2f22afd29ba6f3ddd18
|
comrade/core/context_processors.py
|
comrade/core/context_processors.py
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
from django.conf import settings
from django.contrib.sites.models import Site
from settings import DeploymentType
def default(request):
context = {}
context['DEPLOYMENT'] = settings.DEPLOYMENT
context['current_site'] = Site.objects.get_current()
if settings.DEPLOYMENT != DeploymentType.PRODUCTION:
context['GIT_COMMIT'] = settings.GIT_COMMIT
context['site_email'] = settings.CONTACT_EMAIL
if request.is_secure():
context['protocol'] = 'https://'
else:
context['protocol'] = 'http://'
context['current_site_url'] = (context['protocol'] +
context['current_site'].domain)
return context
def profile(request):
context = {}
if request.user.is_authenticated():
context['profile'] = request.user.get_profile()
return context
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
else:
ssl_media_url = settings.MEDIA_URL
return {'MEDIA_URL': ssl_media_url}
|
Add a context processor that adds the UserProfile to each context.
|
Add a context processor that adds the UserProfile to each context.
|
Python
|
mit
|
bueda/django-comrade
|
---
+++
@@ -17,6 +17,12 @@
context['current_site'].domain)
return context
+def profile(request):
+ context = {}
+ if request.user.is_authenticated():
+ context['profile'] = request.user.get_profile()
+ return context
+
def ssl_media(request):
if request.is_secure():
ssl_media_url = settings.MEDIA_URL.replace('http://','https://')
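As an aside, a context processor only runs when it is listed in the template settings, so the new profile function also has to be registered there. The exact mechanism depends on the Django release; on the older releases this code targets, the registration would look roughly like the sketch below (the auth entry is only an example of a typical neighbour):
# settings.py (sketch; the setting name and surrounding entries vary by Django version)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'comrade.core.context_processors.default',
    'comrade.core.context_processors.profile',
    'comrade.core.context_processors.ssl_media',
)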
|
921e315e61355d80caea673ce09f8944388d86e2
|
tests/unit/util/test_cache.py
|
tests/unit/util/test_cache.py
|
"""Test praw.util.cache."""
from .. import UnitTest
from praw.util.cache import cachedproperty
class TestCachedProperty(UnitTest):
class Klass:
@cachedproperty
def nine(self):
"""Return 9."""
return 9
def ten(self):
return 10
ten = cachedproperty(ten, doc="Return 10.")
def test_get(self):
klass = self.Klass()
assert "nine" not in klass.__dict__
assert klass.nine == 9
assert "nine" in klass.__dict__
def test_repr(self):
klass = self.Klass()
assert repr(klass.nine) == "9"
property_repr = repr(self.Klass.nine)
assert property_repr.startswith("<cachedproperty <function")
def test_doc(self):
assert self.Klass.nine.__doc__ == "Return 9."
assert self.Klass.ten.__doc__ == "Return 10."
|
"""Test praw.util.cache."""
from .. import UnitTest
from praw.util.cache import cachedproperty
class TestCachedProperty(UnitTest):
class Klass:
@cachedproperty
def nine(self):
"""Return 9."""
return 9
def ten(self):
return 10
ten = cachedproperty(ten, doc="Return 10.")
def test_get(self):
klass = self.Klass()
assert "nine" not in klass.__dict__
assert klass.nine == 9
assert "nine" in klass.__dict__
assert "ten" not in klass.__dict__
assert klass.ten == 10
assert "ten" in klass.__dict__
def test_repr(self):
klass = self.Klass()
assert repr(klass.nine) == "9"
property_repr = repr(self.Klass.nine)
assert property_repr.startswith("<cachedproperty <function")
assert repr(klass.ten) == "10"
property_repr = repr(self.Klass.ten)
assert property_repr.startswith("<cachedproperty <function")
def test_doc(self):
assert self.Klass.nine.__doc__ == "Return 9."
assert self.Klass.ten.__doc__ == "Return 10."
|
Test for property ten as well
|
Test for property ten as well
|
Python
|
bsd-2-clause
|
praw-dev/praw,gschizas/praw,praw-dev/praw,gschizas/praw
|
---
+++
@@ -22,6 +22,9 @@
assert "nine" not in klass.__dict__
assert klass.nine == 9
assert "nine" in klass.__dict__
+ assert "ten" not in klass.__dict__
+ assert klass.ten == 10
+ assert "ten" in klass.__dict__
def test_repr(self):
klass = self.Klass()
@@ -30,6 +33,11 @@
property_repr = repr(self.Klass.nine)
assert property_repr.startswith("<cachedproperty <function")
+ assert repr(klass.ten) == "10"
+
+ property_repr = repr(self.Klass.ten)
+ assert property_repr.startswith("<cachedproperty <function")
+
def test_doc(self):
assert self.Klass.nine.__doc__ == "Return 9."
assert self.Klass.ten.__doc__ == "Return 10."
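As an aside, the behaviour these tests pin down, the value landing in the instance __dict__ after the first access while class-level access returns the descriptor itself, is the usual cached-property pattern. A minimal descriptor sketch of that pattern (not PRAW's actual implementation):
class CachedPropertySketch:
    """Minimal stand-in showing the caching pattern the tests above exercise."""
    def __init__(self, func, doc=None):
        self.func = func
        self.__doc__ = doc if doc is not None else func.__doc__
    def __get__(self, obj, objtype=None):
        if obj is None:
            return self  # class access returns the descriptor, hence the repr assertions
        value = self.func(obj)
        # Caching under the same attribute name means later lookups hit obj.__dict__
        # directly and the wrapped function runs only once per instance.
        obj.__dict__[self.func.__name__] = value
        return value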
|
6f29293e6f447dfd80d10c173b7c5a6cc13a4243
|
main/urls.py
|
main/urls.py
|
from django.conf.urls import url
from django.views import generic
from . import views
app_name = 'main'
urlpatterns = [
url(r'^$', views.AboutView.as_view(), name='about'),
url(r'^chas/$', views.AboutChasView.as_view(), name='chas'),
url(r'^evan/$', views.AboutEvanView.as_view(), name='evan'),
]
|
from django.urls import include, path
from . import views
app_name = 'main'
urlpatterns = [
path('', views.AboutView.as_view(), name='about'),
path('chas/', views.AboutChasView.as_view(), name='chas'),
path('evan/', views.AboutEvanView.as_view(), name='evan'),
]
|
Move some urlpatterns to Django 2.0 preferred method
|
Move some urlpatterns to Django 2.0 preferred method
|
Python
|
mit
|
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
|
---
+++
@@ -1,11 +1,10 @@
-from django.conf.urls import url
-from django.views import generic
+from django.urls import include, path
from . import views
app_name = 'main'
urlpatterns = [
- url(r'^$', views.AboutView.as_view(), name='about'),
- url(r'^chas/$', views.AboutChasView.as_view(), name='chas'),
- url(r'^evan/$', views.AboutEvanView.as_view(), name='evan'),
+ path('', views.AboutView.as_view(), name='about'),
+ path('chas/', views.AboutChasView.as_view(), name='chas'),
+ path('evan/', views.AboutEvanView.as_view(), name='evan'),
]
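As an aside, path() matches literal segments plus typed converters instead of regular expressions, and patterns that still need a regex can use re_path(). A small sketch with placeholder view names, not views from this project:
from django.urls import path, re_path
from . import views
urlpatterns = [
    # Converter syntax replaces the old named regex groups.
    path('articles/<int:year>/', views.year_archive, name='year-archive'),
    # re_path keeps the regex behaviour where a converter does not fit.
    re_path(r'^legacy/(?P<slug>[-\w]+)/$', views.legacy_detail, name='legacy-detail'),
]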
|
3de9cd44ef803b7c7f3e05e29ecaa30113caf1ba
|
test/db/relations.py
|
test/db/relations.py
|
import unittest
from firmant.db.relations import schema
class TestSchemaLoad(unittest.TestCase):
def testLoad(self):
if schema('loader-works') != 'SCHEMA LOAD WORKING PROPERLY':
self.fail()
suite = unittest.TestLoader().loadTestsFromTestCase(TestSchemaLoad)
|
import unittest
from firmant.db.relations import schema
class TestSchemaLoad(unittest.TestCase):
def testLoad(self):
self.assertEqual(schema('loader-works'), 'SCHEMA LOAD WORKING PROPERLY')
suite = unittest.TestLoader().loadTestsFromTestCase(TestSchemaLoad)
|
Update the schema load test.
|
Update the schema load test.
It now has 100% coverage if the tests pass.
|
Python
|
bsd-3-clause
|
rescrv/firmant
|
---
+++
@@ -6,7 +6,6 @@
class TestSchemaLoad(unittest.TestCase):
def testLoad(self):
- if schema('loader-works') != 'SCHEMA LOAD WORKING PROPERLY':
- self.fail()
+ self.assertEqual(schema('loader-works'), 'SCHEMA LOAD WORKING PROPERLY')
suite = unittest.TestLoader().loadTestsFromTestCase(TestSchemaLoad)
|
ad7e1149081461d2d34578e06cf5f470d2d20e71
|
tomviz/python/Subtract_TiltSer_Background.py
|
tomviz/python/Subtract_TiltSer_Background.py
|
def transform_scalars(dataset):
from tomviz import utils
import numpy as np
#----USER SPECIFIED VARIABLES-----#
###XRANGE###
###YRANGE###
###ZRANGE###
#---------------------------------#
data_bs = utils.get_array(dataset) #get data as numpy array
if data_bs is None: #Check if data exists
raise RuntimeError("No data array found!")
for i in range(ZRANGE[0],ZRANGE[1]):
a = data_bs[:,:,i] - np.average(data_bs[XRANGE[0]:XRANGE[1],YRANGE[0]:YRANGE[1],i])
data_bs[:,:,i] = a
utils.set_array(dataset, data_bs)
|
def transform_scalars(dataset):
from tomviz import utils
import numpy as np
#----USER SPECIFIED VARIABLES-----#
###XRANGE###
###YRANGE###
###ZRANGE###
#---------------------------------#
data_bs = utils.get_array(dataset) #get data as numpy array
data_bs = data_bs.astype(np.float32) #change tilt series type to float
if data_bs is None: #Check if data exists
raise RuntimeError("No data array found!")
for i in range(ZRANGE[0],ZRANGE[1]):
a = data_bs[:,:,i] - np.average(data_bs[XRANGE[0]:XRANGE[1],YRANGE[0]:YRANGE[1],i])
data_bs[:,:,i] = a
utils.set_array(dataset, data_bs)
|
Change tilt series type to float in manual background sub.
|
Change tilt series type to float in manual background sub.
|
Python
|
bsd-3-clause
|
cjh1/tomviz,cryos/tomviz,cjh1/tomviz,mathturtle/tomviz,thewtex/tomviz,OpenChemistry/tomviz,cryos/tomviz,cryos/tomviz,OpenChemistry/tomviz,thewtex/tomviz,thewtex/tomviz,cjh1/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,mathturtle/tomviz
|
---
+++
@@ -9,7 +9,9 @@
#---------------------------------#
data_bs = utils.get_array(dataset) #get data as numpy array
-
+
+ data_bs = data_bs.astype(np.float32) #change tilt series type to float
+
if data_bs is None: #Check if data exists
raise RuntimeError("No data array found!")
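As an aside on why the added cast matters: tilt series frequently arrive as unsigned integers, and writing a background-subtracted slice back into an integer array would mangle any value that goes negative, while a float copy keeps it. A tiny numpy illustration with made-up numbers:
import numpy as np
tilt = np.array([[3, 10, 250]], dtype=np.uint8)   # stand-in for an integer-typed tilt series
tilt = tilt.astype(np.float32)                    # the same cast the operator now performs
tilt[0, :] = tilt[0, :] - np.average(tilt[0, :])  # values below the mean become negative,
print(tilt)                                       # which the float array represents faithfully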
|
850c5c6f133fdfd131605eb1bf1e971b33dd7416
|
website/addons/twofactor/tests/test_views.py
|
website/addons/twofactor/tests/test_views.py
|
from nose.tools import *
from webtest_plus import TestApp
from tests.base import OsfTestCase
from tests.factories import AuthUserFactory
from website.app import init_app
from website.addons.twofactor.tests import _valid_code
app = init_app(
routes=True,
set_backends=False,
settings_module='website.settings',
)
class TestViews(OsfTestCase):
def setUp(self):
super(TestViews, self).setUp()
self.user = AuthUserFactory()
self.user.add_addon('twofactor')
self.user_settings = self.user.get_addon('twofactor')
self.app = TestApp(app)
def test_confirm_code(self):
# Send a valid code to the API endpoint for the user settings.
res = self.app.post_json(
'/api/v1/settings/twofactor/',
{'code': _valid_code(self.user_settings.totp_secret)},
auth=self.user.auth
)
# reload the user settings object from the DB
self.user_settings.reload()
assert_true(self.user_settings.is_confirmed)
assert_equal(res.status_code, 200)
|
from nose.tools import *
from webtest.app import AppError
from webtest_plus import TestApp
from tests.base import OsfTestCase
from tests.factories import AuthUserFactory
from website.app import init_app
from website.addons.twofactor.tests import _valid_code
app = init_app(
routes=True,
set_backends=False,
settings_module='website.settings',
)
class TestViews(OsfTestCase):
def setUp(self):
super(TestViews, self).setUp()
self.user = AuthUserFactory()
self.user.add_addon('twofactor')
self.user_settings = self.user.get_addon('twofactor')
self.app = TestApp(app)
def test_confirm_code(self):
# Send a valid code to the API endpoint for the user settings.
res = self.app.post_json(
'/api/v1/settings/twofactor/',
{'code': _valid_code(self.user_settings.totp_secret)},
auth=self.user.auth
)
# reload the user settings object from the DB
self.user_settings.reload()
assert_true(self.user_settings.is_confirmed)
assert_equal(res.status_code, 200)
def test_confirm_code_failure(self):
with assert_raises(AppError) as error:
res = self.app.post_json(
'/api/v1/settings/twofactor/',
{'code': '000000'},
auth=self.user.auth
)
assert_in('403 FORBIDDEN', error.message)
# reload the user settings object from the DB
self.user_settings.reload()
assert_false(self.user_settings.is_confirmed)
|
Add test for failure to confirm 2FA code
|
Add test for failure to confirm 2FA code
|
Python
|
apache-2.0
|
doublebits/osf.io,brianjgeiger/osf.io,billyhunt/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,barbour-em/osf.io,wearpants/osf.io,MerlinZhang/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,dplorimer/osf,amyshi188/osf.io,SSJohns/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,mattclark/osf.io,SSJohns/osf.io,amyshi188/osf.io,caneruguz/osf.io,ckc6cz/osf.io,baylee-d/osf.io,hmoco/osf.io,Nesiehr/osf.io,revanthkolli/osf.io,Ghalko/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,mluo613/osf.io,sloria/osf.io,caseyrygt/osf.io,bdyetton/prettychart,erinspace/osf.io,binoculars/osf.io,himanshuo/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,reinaH/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,binoculars/osf.io,mluke93/osf.io,himanshuo/osf.io,zachjanicki/osf.io,haoyuchen1992/osf.io,cosenal/osf.io,abought/osf.io,hmoco/osf.io,AndrewSallans/osf.io,caseyrollins/osf.io,zamattiac/osf.io,kwierman/osf.io,jolene-esposito/osf.io,cwisecarver/osf.io,MerlinZhang/osf.io,chrisseto/osf.io,revanthkolli/osf.io,rdhyee/osf.io,barbour-em/osf.io,abought/osf.io,acshi/osf.io,lyndsysimon/osf.io,monikagrabowska/osf.io,mluo613/osf.io,arpitar/osf.io,leb2dg/osf.io,SSJohns/osf.io,ZobairAlijan/osf.io,asanfilippo7/osf.io,wearpants/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,erinspace/osf.io,sbt9uc/osf.io,asanfilippo7/osf.io,abought/osf.io,caneruguz/osf.io,icereval/osf.io,felliott/osf.io,KAsante95/osf.io,kch8qx/osf.io,leb2dg/osf.io,samanehsan/osf.io,cldershem/osf.io,asanfilippo7/osf.io,TomHeatwole/osf.io,fabianvf/osf.io,chrisseto/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,GaryKriebel/osf.io,aaxelb/osf.io,emetsger/osf.io,barbour-em/osf.io,jolene-esposito/osf.io,dplorimer/osf,mfraezz/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,billyhunt/osf.io,samchrisinger/osf.io,acshi/osf.io,fabianvf/osf.io,samanehsan/osf.io,felliott/osf.io,revanthkolli/osf.io,Nesiehr/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,MerlinZhang/osf.io,mluo613/osf.io,mfraezz/osf.io,zamattiac/osf.io,emetsger/osf.io,AndrewSallans/osf.io,alexschiller/osf.io,TomBaxter/osf.io,doublebits/osf.io,cosenal/osf.io,crcresearch/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,zkraime/osf.io,erinspace/osf.io,felliott/osf.io,wearpants/osf.io,petermalcolm/osf.io,mluke93/osf.io,himanshuo/osf.io,cosenal/osf.io,ZobairAlijan/osf.io,zkraime/osf.io,jinluyuan/osf.io,rdhyee/osf.io,sloria/osf.io,lamdnhan/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,fabianvf/osf.io,DanielSBrown/osf.io,cslzchen/osf.io,mluo613/osf.io,TomHeatwole/osf.io,HarryRybacki/osf.io,danielneis/osf.io,haoyuchen1992/osf.io,icereval/osf.io,barbour-em/osf.io,aaxelb/osf.io,jolene-esposito/osf.io,alexschiller/osf.io,mattclark/osf.io,kch8qx/osf.io,chennan47/osf.io,Johnetordoff/osf.io,amyshi188/osf.io,samchrisinger/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,chennan47/osf.io,jinluyuan/osf.io,acshi/osf.io,cwisecarver/osf.io,Ghalko/osf.io,GaryKriebel/osf.io,Johnetordoff/osf.io,adlius/osf.io,acshi/osf.io,DanielSBrown/osf.io,mattclark/osf.io,ticklemepierce/osf.io,GaryKriebel/osf.io,arpitar/osf.io,kwierman/osf.io,wearpants/osf.io,reinaH/osf.io,cwisecarver/osf.io,sbt9uc/osf.io,aaxelb/osf.io,caneruguz/osf.io,adlius/osf.io,amyshi188/osf.io,leb2dg/osf.io,dplorimer/osf,petermalcolm/osf.io,cldershem/osf.io,HarryRybacki/osf.io,cldershem/osf.io,cslzchen/osf.io,sbt9uc/osf.io,binoculars/osf.io,Nesiehr/osf.io,danielneis/osf.io,zkraime/osf.io,baylee-d/osf.io,jeffreyliu3230/osf.io,adlius/osf.io,Johnetordoff/osf.io,MerlinZhang/osf.io,kushG/osf.io,HalcyonChimera/osf.io,jmcarp/osf.io,nj
antrania/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,mluo613/osf.io,kushG/osf.io,sloria/osf.io,Ghalko/osf.io,ticklemepierce/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,leb2dg/osf.io,lyndsysimon/osf.io,caseyrygt/osf.io,mfraezz/osf.io,GageGaskins/osf.io,njantrania/osf.io,brandonPurvis/osf.io,kwierman/osf.io,GaryKriebel/osf.io,billyhunt/osf.io,GageGaskins/osf.io,HarryRybacki/osf.io,billyhunt/osf.io,fabianvf/osf.io,mluke93/osf.io,hmoco/osf.io,laurenrevere/osf.io,baylee-d/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,jmcarp/osf.io,lyndsysimon/osf.io,lyndsysimon/osf.io,saradbowman/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,cldershem/osf.io,billyhunt/osf.io,emetsger/osf.io,lamdnhan/osf.io,zamattiac/osf.io,jnayak1/osf.io,danielneis/osf.io,samchrisinger/osf.io,arpitar/osf.io,mluke93/osf.io,RomanZWang/osf.io,acshi/osf.io,doublebits/osf.io,jolene-esposito/osf.io,reinaH/osf.io,zkraime/osf.io,samchrisinger/osf.io,zachjanicki/osf.io,dplorimer/osf,petermalcolm/osf.io,cslzchen/osf.io,bdyetton/prettychart,rdhyee/osf.io,RomanZWang/osf.io,felliott/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,bdyetton/prettychart,lamdnhan/osf.io,mfraezz/osf.io,jinluyuan/osf.io,reinaH/osf.io,samanehsan/osf.io,chennan47/osf.io,zachjanicki/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,kushG/osf.io,crcresearch/osf.io,adlius/osf.io,brianjgeiger/osf.io,lamdnhan/osf.io,jnayak1/osf.io,arpitar/osf.io,pattisdr/osf.io,GageGaskins/osf.io,alexschiller/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,revanthkolli/osf.io,cosenal/osf.io,DanielSBrown/osf.io,hmoco/osf.io,jeffreyliu3230/osf.io,asanfilippo7/osf.io,TomBaxter/osf.io,jeffreyliu3230/osf.io,bdyetton/prettychart,jnayak1/osf.io,crcresearch/osf.io,ckc6cz/osf.io,kch8qx/osf.io,pattisdr/osf.io,himanshuo/osf.io,RomanZWang/osf.io,jinluyuan/osf.io,njantrania/osf.io,brianjgeiger/osf.io,ckc6cz/osf.io,kushG/osf.io,njantrania/osf.io,danielneis/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,caseyrygt/osf.io,doublebits/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,doublebits/osf.io,Ghalko/osf.io,samanehsan/osf.io,kwierman/osf.io,caneruguz/osf.io,RomanZWang/osf.io,saradbowman/osf.io,emetsger/osf.io,sbt9uc/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,jmcarp/osf.io,kch8qx/osf.io,abought/osf.io,KAsante95/osf.io,kch8qx/osf.io,icereval/osf.io,KAsante95/osf.io,jeffreyliu3230/osf.io
|
---
+++
@@ -1,4 +1,5 @@
from nose.tools import *
+from webtest.app import AppError
from webtest_plus import TestApp
from tests.base import OsfTestCase
@@ -35,4 +36,17 @@
assert_true(self.user_settings.is_confirmed)
assert_equal(res.status_code, 200)
+ def test_confirm_code_failure(self):
+ with assert_raises(AppError) as error:
+ res = self.app.post_json(
+ '/api/v1/settings/twofactor/',
+ {'code': '000000'},
+ auth=self.user.auth
+ )
+ assert_in('403 FORBIDDEN', error.message)
+
+ # reload the user settings object from the DB
+ self.user_settings.reload()
+
+ assert_false(self.user_settings.is_confirmed)
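A hedged alternative to catching AppError as done above, assuming webtest's expect_errors flag is available through webtest_plus (it is not used in the original commit): ask the test app to return the error response instead of raising, then assert on the status code.
# Sketch only, inside the same test method; expect_errors is a webtest option.
res = self.app.post_json(
    '/api/v1/settings/twofactor/',
    {'code': '000000'},
    auth=self.user.auth,
    expect_errors=True,   # return the 403 response rather than raising AppError
)
assert_equal(res.status_code, 403)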
|
33550cab832da5b90cf2fb5af2f211840dfe2caa
|
test/mintraatests.py
|
test/mintraatests.py
|
import cherrypy
import os
import raatest
from conary import dbstore
from conary.server import schema
class webPluginTest(raatest.webTest):
def __init__(
self, module = None, init = True, preInit = None, preConst = None):
def func(rt):
cherrypy.root.servicecfg.pluginDirs = [os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "raaplugins"))]
if preInit:
preInit(rt)
return raatest.webTest.__init__(
self, module=module, init=init, preInit=func, preConst=preConst)
|
import cherrypy
import os
import raatest
from conary import dbstore
from conary.server import schema
class webPluginTest(raatest.webTest):
def __init__(
self, module = None, init = True, preInit = None, preConst = None):
def func(rt):
cherrypy.root.servicecfg.pluginDirs = [os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "raaplugins"))]
if preInit:
preInit(rt)
return raatest.webTest.__init__(
self, module=module, init=init, preInit=func, preConst=preConst)
|
Fix relative path to plugin directories (RBL-2737)
|
Fix relative path to plugin directories (RBL-2737)
|
Python
|
apache-2.0
|
sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint
|
---
+++
@@ -10,7 +10,7 @@
self, module = None, init = True, preInit = None, preConst = None):
def func(rt):
- cherrypy.root.servicecfg.pluginDirs = [os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "raaplugins"))]
+ cherrypy.root.servicecfg.pluginDirs = [os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "raaplugins"))]
if preInit:
preInit(rt)
|
78f2ff20e86a7801c9b28cff17f3fe7db93c2788
|
alerts/views.py
|
alerts/views.py
|
from django.shortcuts import render
from django.contrib import messages
# Create your views here.
def test(request):
messages.debug(request, 'This is a debug alert')
messages.info(request, 'This is an info alert')
messages.success(request, 'This is a success alert')
messages.warning(request, 'This is a warning alert')
messages.error(request, 'This is an error alert')
return render(request, 'alerts/test.html')
|
from django.shortcuts import render
from django.contrib import messages
# Create your views here.
def test(request):
messages.set_level(request, messages.DEBUG)
messages.debug(request, 'This is a debug alert')
messages.info(request, 'This is an info alert')
messages.success(request, 'This is a success alert')
messages.warning(request, 'This is a warning alert')
messages.error(request, 'This is an error alert')
return render(request, 'alerts/test.html')
|
Set message level to DEBUG in test view
|
Set message level to DEBUG in test view
|
Python
|
mit
|
Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano
|
---
+++
@@ -4,6 +4,8 @@
# Create your views here.
def test(request):
+ messages.set_level(request, messages.DEBUG)
+
messages.debug(request, 'This is a debug alert')
messages.info(request, 'This is an info alert')
messages.success(request, 'This is a success alert')
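For context, a minimal sketch (not part of the commit) of why set_level is needed: Django's messages framework only records messages at or above its minimum level, which defaults to INFO, so debug() calls are silently dropped unless the level is lowered for the request.
# Sketch only: lowering the level makes messages.debug() visible.
from django.contrib import messages

def demo(request):
    messages.debug(request, 'dropped: below the default INFO threshold')
    messages.set_level(request, messages.DEBUG)
    messages.debug(request, 'recorded: the level is now DEBUG')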
|
4744f3b3e5193ad66a4bba64d8a8d8c4e328fdcc
|
pychat.py
|
pychat.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from lib.login import Login
def pychat():
login = Login
login()
if __name__ == '__main__':
pychat()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from lib.login import Login
from lib.client import Client
from Tkinter import *
class PyChat(object):
def __init__(self):
self.root = Tk()
self.root.geometry("300x275+400+100")
def login(self):
self.login = Login(self.root, self.create_client)
def run(self):
self.root.mainloop()
def create_client(self):
credentials = self.login.login_credentials()
credentials['root'] = self.root
self.reset()
self.client = Client(**credentials)
def reset(self):
for element in self.root.winfo_children():
element.destroy()
if __name__ == '__main__':
pychat = PyChat()
pychat.login()
pychat.run()
|
Rework to have a central root window controlled from a top class
|
Rework to have a central root window controlled from a top class
|
Python
|
mit
|
tijko/PyChat
|
---
+++
@@ -2,12 +2,34 @@
# -*- coding: utf-8 -*-
from lib.login import Login
+from lib.client import Client
+from Tkinter import *
-def pychat():
- login = Login
- login()
+class PyChat(object):
+
+ def __init__(self):
+ self.root = Tk()
+ self.root.geometry("300x275+400+100")
+
+ def login(self):
+ self.login = Login(self.root, self.create_client)
+
+ def run(self):
+ self.root.mainloop()
+
+ def create_client(self):
+ credentials = self.login.login_credentials()
+ credentials['root'] = self.root
+ self.reset()
+ self.client = Client(**credentials)
+
+ def reset(self):
+ for element in self.root.winfo_children():
+ element.destroy()
if __name__ == '__main__':
- pychat()
+ pychat = PyChat()
+ pychat.login()
+ pychat.run()
|
f49c132f0bc90daf98b89bcf7270a2d37cd411d2
|
tools/hexlifyscript.py
|
tools/hexlifyscript.py
|
'''
Turn a Python script into Intel HEX format to be concatenated at the
end of the MicroPython firmware.hex. A simple header is added to the
script.
'''
import sys
import struct
import binascii
# read script body
with open(sys.argv[1], "rb") as f:
data = f.read()
# add header, pad to multiple of 16 bytes
data = b'MP' + struct.pack('<H', len(data)) + data
data = data + bytes(16 - len(data) % 16)
assert len(data) <= 0x2000
# convert to .hex format
addr = 0x3e000 # magic start address in flash
for i in range(0, len(data), 16):
chunk = data[i:min(i + 16, len(data))]
chunk = struct.pack('>BHB', len(chunk), addr & 0xffff, 0) + chunk
checksum = (-(sum(data))) & 0xff
hexline = ':%s%02X' % (str(binascii.hexlify(chunk), 'utf8').upper(), checksum)
print(hexline)
addr += 16
|
'''
Turn a Python script into Intel HEX format to be concatenated at the
end of the MicroPython firmware.hex. A simple header is added to the
script.
'''
import sys
import struct
import binascii
# read script body
with open(sys.argv[1], "rb") as f:
data = f.read()
# add header, pad to multiple of 16 bytes
data = b'MP' + struct.pack('<H', len(data)) + data
data = data + bytes(16 - len(data) % 16)
assert len(data) <= 0x2000
# convert to .hex format
addr = 0x3e000 # magic start address in flash
for i in range(0, len(data), 16):
chunk = data[i:min(i + 16, len(data))]
chunk = struct.pack('>BHB', len(chunk), addr & 0xffff, 0) + chunk
checksum = (-(sum(chunk))) & 0xff
hexline = ':%s%02X' % (str(binascii.hexlify(chunk), 'utf8').upper(), checksum)
print(hexline)
addr += 16
|
Fix bug in hexlifyscript.py when computing checksum.
|
Fix bug in hexlifyscript.py when computing checksum.
|
Python
|
mit
|
JoeGlancy/micropython,JoeGlancy/micropython,JoeGlancy/micropython
|
---
+++
@@ -22,7 +22,7 @@
for i in range(0, len(data), 16):
chunk = data[i:min(i + 16, len(data))]
chunk = struct.pack('>BHB', len(chunk), addr & 0xffff, 0) + chunk
- checksum = (-(sum(data))) & 0xff
+ checksum = (-(sum(chunk))) & 0xff
hexline = ':%s%02X' % (str(binascii.hexlify(chunk), 'utf8').upper(), checksum)
print(hexline)
addr += 16
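The one-line fix above works because an Intel HEX record's checksum is the two's complement of the sum of that record's own bytes (length, address, type and data), not of the whole payload. A minimal sketch of a verifier, assuming the ':'-prefixed lines produced by the script:
# Sketch only: every record's bytes, including the checksum, sum to 0 mod 256.
import binascii

def intel_hex_record_ok(hexline):
    raw = binascii.unhexlify(hexline.strip()[1:])   # drop the leading ':'
    return sum(raw) & 0xff == 0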
|
e40ef4cbe59c5c3d064e60f02f60f19b0bb202a4
|
test_daily_parser.py
|
test_daily_parser.py
|
#!/usr/bin/env python
# -*- coding: latin-1 -*-
"""Unit tests."""
import unittest
from daily_parser import url_from_args
class TestDailyParser(unittest.TestCase):
"""Testing methods from daily_parser."""
def test_url_from_args(self):
output = url_from_args(2014, 1)
expected = 'https://dons.wikimedia.fr/journal/2014-01'
self.assertEqual(output, expected)
|
#!/usr/bin/env python
# -*- coding: latin-1 -*-
"""Unit tests."""
import unittest
from daily_parser import url_from_args, DonationsParser
class TestDailyParser(unittest.TestCase):
"""Testing methods from daily_parser."""
def test_url_from_args(self):
output = url_from_args(2014, 1)
expected = 'https://dons.wikimedia.fr/journal/2014-01'
self.assertEqual(output, expected)
class TestDonationsParser(unittest.TestCase):
"""Testing DonationsParser class."""
def setUp(self):
self.donations_parser = DonationsParser(2014, 01)
donations_data = {
'01': {'sum': 370, 'avg': 46.25, 'quantity': 8},
'02': {'sum': 5682, 'avg': 132.14, 'quantity': 43}
}
self.donations_parser.donations = donations_data
def test_get_csv(self):
expected = """'day', 'sum', 'quantity', 'avg'
'2014-01-01', 370, 8, 46.25
'2014-01-02', 5682, 43, 132.14
"""
output = self.donations_parser.get_csv()
self.assertEqual(output, expected)
|
Add unit test for DonationsParser.get_csv
|
Add unit test for DonationsParser.get_csv
|
Python
|
mit
|
Commonists/DonationsLogParser,Commonists/DonationsLogParser
|
---
+++
@@ -4,7 +4,7 @@
"""Unit tests."""
import unittest
-from daily_parser import url_from_args
+from daily_parser import url_from_args, DonationsParser
class TestDailyParser(unittest.TestCase):
@@ -15,3 +15,24 @@
output = url_from_args(2014, 1)
expected = 'https://dons.wikimedia.fr/journal/2014-01'
self.assertEqual(output, expected)
+
+
+class TestDonationsParser(unittest.TestCase):
+
+ """Testing DonationsParser class."""
+
+ def setUp(self):
+ self.donations_parser = DonationsParser(2014, 01)
+ donations_data = {
+ '01': {'sum': 370, 'avg': 46.25, 'quantity': 8},
+ '02': {'sum': 5682, 'avg': 132.14, 'quantity': 43}
+ }
+ self.donations_parser.donations = donations_data
+
+ def test_get_csv(self):
+ expected = """'day', 'sum', 'quantity', 'avg'
+'2014-01-01', 370, 8, 46.25
+'2014-01-02', 5682, 43, 132.14
+"""
+ output = self.donations_parser.get_csv()
+ self.assertEqual(output, expected)
|
2377f500d4667623da9a2921c62862b00d7f404c
|
school/frontend/views.py
|
school/frontend/views.py
|
from flask import Blueprint, render_template, url_for, redirect, flash
from flask.ext.login import login_required, logout_user, current_user, login_user
from .forms import LoginForm
from school.config import FLASH_SUCCESS, FLASH_INFO, FLASH_WARNING
frontend = Blueprint('frontend', __name__)
@frontend.route('/login', methods=["GET", "POST"])
def login():
if current_user.is_authenticated(): # user is already logged in
flash("You are already logged in", FLASH_WARNING)
return redirect(url_for('user.index'))
form = LoginForm()
if form.validate_on_submit():
flash('Successfully logged in as %s' % form.user.username, FLASH_SUCCESS)
login_user(form.user)
return form.redirect("user.index")
return render_template('frontend/index.html', form=form)
@frontend.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.', FLASH_INFO)
return redirect(url_for("frontend.login"))
|
from flask import Blueprint, render_template, url_for, redirect, flash
from flask.ext.login import login_required, logout_user, current_user, login_user
from .forms import LoginForm
from school.config import FLASH_SUCCESS, FLASH_INFO, FLASH_WARNING
frontend = Blueprint('frontend', __name__)
@frontend.route('/login', methods=["GET", "POST"])
def login():
if current_user.is_authenticated(): # user is already logged in
flash("You are already logged in", FLASH_WARNING)
return redirect(url_for('user.index'))
form = LoginForm()
if form.validate_on_submit():
login_user(form.user)
return form.redirect("user.index")
return render_template('frontend/index.html', form=form)
@frontend.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.', FLASH_INFO)
return redirect(url_for("frontend.login"))
|
Remove flash success message when logging in.
|
Remove flash success message when logging in.
|
Python
|
mit
|
leyyin/university-SE,leyyin/university-SE,leyyin/university-SE
|
---
+++
@@ -14,7 +14,6 @@
form = LoginForm()
if form.validate_on_submit():
- flash('Successfully logged in as %s' % form.user.username, FLASH_SUCCESS)
login_user(form.user)
return form.redirect("user.index")
|
7aedc2151035174632a7f3e55be7563f71e65117
|
tests/audio/test_loading.py
|
tests/audio/test_loading.py
|
import pytest
@pytest.mark.xfail
def test_missing_file(audiomgr):
sound = audiomgr.get_sound('/not/a/valid/file.ogg')
assert sound is None
|
import pytest
def test_missing_file(audiomgr):
sound = audiomgr.get_sound('/not/a/valid/file.ogg')
assert str(sound).startswith('NullAudioSound')
|
Update audio test to recognize missing sounds as NullAudioSound
|
tests: Update audio test to recognize missing sounds as NullAudioSound
|
Python
|
bsd-3-clause
|
chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d
|
---
+++
@@ -1,6 +1,5 @@
import pytest
-@pytest.mark.xfail
def test_missing_file(audiomgr):
sound = audiomgr.get_sound('/not/a/valid/file.ogg')
- assert sound is None
+ assert str(sound).startswith('NullAudioSound')
|
9c5349595dca8013f1353785bbd34fb3d7cd4a6a
|
misc/__init__.py
|
misc/__init__.py
|
# -*- coding: utf-8 -*-
import logging
__version__ = VERSION = '0.0.1'
__project__ = PROJECT = 'django-misc'
log = logging.getLogger( __name__ )
|
# -*- coding: utf-8 -*-
import logging
__version__ = VERSION = '0.0.2'
__project__ = PROJECT = 'django-misc'
log = logging.getLogger( __name__ )
|
Change version to 0.0.2 to update pypi repo
|
Change version to 0.0.2 to update pypi repo
|
Python
|
mit
|
ilblackdragon/django-misc
|
---
+++
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
import logging
-__version__ = VERSION = '0.0.1'
+__version__ = VERSION = '0.0.2'
__project__ = PROJECT = 'django-misc'
log = logging.getLogger( __name__ )
|
66c50cdeda974f2159259b466995339244ffb694
|
training/level-2-command-line-interfaces/dragon-warrior/tmarsha1/primes/Tests/PrimeFinderTests.py
|
training/level-2-command-line-interfaces/dragon-warrior/tmarsha1/primes/Tests/PrimeFinderTests.py
|
"""
Test the Prime Finder class
Still working on getting dependency injection working.
"""
import unittest
from primes.Primes import PrimeFinder
#from primes.Primes import PrimeGenerator
class PrimeFinderTests(unittest.TestCase):
def test_find_prime(self):
prime_finder = PrimeFinder.PrimeFinder(PrimeGenerator.PrimeGenerator())
self.assertEqual(prime_finder.find_prime(6), 13)
|
"""
Test the Prime Finder class
Still working on getting dependency injection working.
Injecting the Generator into the Finder allows for many possibilities.
From the testing perspective this would allow me to inject a mock object
for the Generator that returns a set value speeding up the testing of the
Prime Finder class.
"""
import unittest
from primes.Primes import PrimeFinder
#from primes.Primes import PrimeGenerator
class PrimeFinderTests(unittest.TestCase):
def test_find_prime(self):
prime_finder = PrimeFinder.PrimeFinder(PrimeGenerator.PrimeGenerator())
self.assertEqual(prime_finder.find_prime(6), 13)
|
Add additional comments regarding Dependency Injection
|
Add additional comments regarding Dependency Injection
|
Python
|
artistic-2.0
|
bigfatpanda-training/pandas-practical-python-primer,bigfatpanda-training/pandas-practical-python-primer
|
---
+++
@@ -2,6 +2,10 @@
Test the Prime Finder class
Still working on getting dependency injection working.
+Injecting the Generator into the Finder allows for many possibilities.
+From the testing perspective this would allow me to inject a mock object
+for the Generator that returns a set value speeding up the testing of the
+Prime Finder class.
"""
import unittest
|
7a7729e9af8e91411526525c19c5d434609e0f21
|
logger.py
|
logger.py
|
MSG_INFO = 0x01
MSG_WARNING = 0x02
MSG_ERROR = 0x04
MSG_VERBOSE = 0x08
MSG_ALL = MSG_INFO | MSG_WARNING | MSG_ERROR | MSG_VERBOSE
def logi(msg):
print("[INFO] " + msg)
def logv(msg):
print("[VERBOSE] " + msg)
def logw(msg):
print("[WARNING] " + msg)
def loge(msg):
print("[ERROR] " + msg)
class Logger(object):
def __init__(self):
self.logger_level = MSG_ALL
def info(self, msg):
if self.logger_level & MSG_INFO:
logi(msg)
def warning(self, msg):
if self.logger_level & MSG_WARNING:
logw(msg)
def error(self, msg):
if self.logger_level & MSG_ERROR:
loge(msg)
def verbose(self, msg):
if self.logger_level & MSG_VERBOSE:
logv(msg)
|
MSG_INFO = 0x01
MSG_WARNING = 0x02
MSG_ERROR = 0x04
MSG_VERBOSE = 0x08
MSG_ALL = MSG_INFO | MSG_WARNING | MSG_ERROR | MSG_VERBOSE
def logi(msg):
print("[INFO] " + msg)
def logv(msg):
print("[VERBOSE] " + msg)
def logw(msg):
print("[WARNING] " + msg)
def loge(msg):
print("\033[1;31m[ERROR] " + msg + "\033[m")
class Logger(object):
def __init__(self):
self.logger_level = MSG_ALL
def info(self, msg):
if self.logger_level & MSG_INFO:
logi(msg)
def warning(self, msg):
if self.logger_level & MSG_WARNING:
logw(msg)
def error(self, msg):
if self.logger_level & MSG_ERROR:
loge(msg)
def verbose(self, msg):
if self.logger_level & MSG_VERBOSE:
logv(msg)
|
Add color for error message.
|
Add color for error message.
|
Python
|
mit
|
PyOCL/oclGA,PyOCL/OpenCLGA,PyOCL/OpenCLGA,PyOCL/oclGA,PyOCL/oclGA,PyOCL/TSP,PyOCL/TSP,PyOCL/oclGA,PyOCL/OpenCLGA
|
---
+++
@@ -11,7 +11,7 @@
def logw(msg):
print("[WARNING] " + msg)
def loge(msg):
- print("[ERROR] " + msg)
+ print("\033[1;31m[ERROR] " + msg + "\033[m")
class Logger(object):
def __init__(self):
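For readers unfamiliar with the escape sequence added above: \033[1;31m is the ANSI SGR code for bold red text and \033[m resets the attributes. A small illustrative helper (a sketch, not part of the project):
# ANSI SGR codes: 1 = bold, 31 = red foreground; "\033[m" resets everything.
RED_BOLD = "\033[1;31m"
RESET = "\033[m"

def colorize(msg, code=RED_BOLD):
    return code + msg + RESET

print(colorize("[ERROR] something went wrong"))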
|
d3b326e421ca482723cafcadd2442eebb8cf2ee6
|
bot/main.py
|
bot/main.py
|
import os
import json
import time
import socket
from subprocess import call
import requests
from test import test_activity
from build import compile_bundle
# Fixes a weird bug... it might create some though :P
os.environ['http_proxy'] = ''
# HOST = 'http://localhost:5001'
HOST = 'http://aslo-bot-master.sugarlabs.org'
print 'Waiting for 1st task'
while True:
r = requests.get(HOST + '/task')
if r.status_code == 404:
time.sleep(7)
continue
task = r.json()
print 'Got new task'
call(['git', 'clone', 'https://www.github.com/' + task['gh'],
'dl'])
result = test_activity(task['bundle_id'], task['gh'])
data = {'result': result, 'file': compile_bundle(),
'bundle_id': task['bundle_id'], 'task_id': task['task_id']}
headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
r = requests.post(HOST + '/done',
data=json.dumps(data), headers=headers)
call(['rm', '-rf', 'dl'])
print 'Mined 1 activity:', task['bundle_id'], task['gh']
|
import os
import json
import time
import socket
from subprocess import call
import requests
from test import test_activity
from build import compile_bundle
# Fixes a weird bug... it might create some though :P
os.environ['http_proxy'] = ''
# HOST = 'http://localhost:5001'
HOST = 'http://aslo-bot-master.sugarlabs.org'
print 'Waiting for 1st task'
while True:
try:
r = requests.get(HOST + '/task')
except requests.exceptions.ConnectionError, e:
continue
if r.status_code == 404:
time.sleep(7)
continue
task = r.json()
print 'Got new task'
call(['git', 'clone', 'https://www.github.com/' + task['gh'],
'dl'])
result = test_activity(task['bundle_id'], task['gh'])
data = {'result': result, 'file': compile_bundle(),
'bundle_id': task['bundle_id'], 'task_id': task['task_id']}
headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
r = requests.post(HOST + '/done',
data=json.dumps(data), headers=headers)
call(['rm', '-rf', 'dl'])
print 'Mined 1 activity:', task['bundle_id'], task['gh']
|
Stop bots from randomly disconnecting
|
Stop bots from randomly disconnecting
|
Python
|
agpl-3.0
|
samdroid-apps/aslo,samdroid-apps/aslo,samdroid-apps/aslo
|
---
+++
@@ -18,7 +18,10 @@
print 'Waiting for 1st task'
while True:
- r = requests.get(HOST + '/task')
+ try:
+ r = requests.get(HOST + '/task')
+ except requests.exceptions.ConnectionError, e:
+ continue
if r.status_code == 404:
time.sleep(7)
continue
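The patch above retries immediately, which spins in a tight loop while the master is unreachable. A hedged refinement (not in the repository) is to wait briefly before retrying:
# Sketch only: poll with a short delay instead of a hot retry loop.
import time
import requests

def fetch_task(url, delay=5):
    while True:
        try:
            return requests.get(url)
        except requests.exceptions.ConnectionError:
            time.sleep(delay)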
|
fc50a002e967f8e3b7de205a866e010dda717962
|
logger.py
|
logger.py
|
import os
import logging
def logger_setup(name):
loglevel = ''
try:
if os.environ['PB_LOGLEVEL'] == 'DEBUG':
loglevel = logging.DEBUG
if os.environ['PB_LOGLEVEL'] == 'INFO':
loglevel = logging.INFO
if os.environ['PB_LOGLEVEL'] == 'WARN':
loglevel = logging.WARN
except:
loglevel = logging.DEBUG
# from https://docs.python.org/2/howto/logging.html#configuring-logging
# set up new logger for this file
logger = logging.getLogger(name)
logger.setLevel(loglevel)
# console handler for logging
conLog = logging.StreamHandler()
conLog.setLevel(loglevel)
# formatter
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# format console logs using formatter
conLog.setFormatter(formatter)
# add console logging transport to logger
logger.addHandler(conLog)
return logger
|
import os
import logging
def logger_setup(name):
loglevel = ''
try:
if os.environ['PB_LOGLEVEL'] == 'DEBUG':
loglevel = logging.DEBUG
if os.environ['PB_LOGLEVEL'] == 'INFO':
loglevel = logging.INFO
if os.environ['PB_LOGLEVEL'] == 'WARN':
loglevel = logging.WARN
except:
loglevel = logging.DEBUG
# from https://docs.python.org/2/howto/logging.html#configuring-logging
# set up new logger for this file
logger = logging.getLogger(name)
logger.setLevel(loglevel)
# formatter
formatter = logging.Formatter('PID: %(process)d - %(asctime)s - %(name)s - %(levelname)s - %(funcName)s - %(message)s')
# console handler for logging
conLog = logging.StreamHandler()
conLog.setLevel(loglevel)
# format console logs using formatter
conLog.setFormatter(formatter)
# log to file handler
fileLog = logging.FileHandler('pairing-bot.log', encoding='utf-8')
fileLog.setLevel(logging.DEBUG)
# format file logs using formatter
fileLog.setFormatter(formatter)
# add console logging transport to logger
logger.addHandler(conLog)
# add file transport to logger
logger.addHandler(fileLog)
return logger
|
Add file transport and update log msg formatting to include pid and fn name
|
Add file transport and update log msg formatting to include pid and fn name
|
Python
|
mit
|
zeusdeux/zulip-pairing-bot
|
---
+++
@@ -20,17 +20,25 @@
logger = logging.getLogger(name)
logger.setLevel(loglevel)
+ # formatter
+ formatter = logging.Formatter('PID: %(process)d - %(asctime)s - %(name)s - %(levelname)s - %(funcName)s - %(message)s')
+
# console handler for logging
conLog = logging.StreamHandler()
conLog.setLevel(loglevel)
-
- # formatter
- formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-
# format console logs using formatter
conLog.setFormatter(formatter)
+
+ # log to file handler
+ fileLog = logging.FileHandler('pairing-bot.log', encoding='utf-8')
+ fileLog.setLevel(logging.DEBUG)
+ # format file logs using formatter
+ fileLog.setFormatter(formatter)
# add console logging transport to logger
logger.addHandler(conLog)
+ # add file transport to logger
+ logger.addHandler(fileLog)
+
return logger
|
4f71339cad35b2444ea295fd4b518e539f1088bb
|
fluent_faq/urls.py
|
fluent_faq/urls.py
|
from django.conf.urls import patterns, url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
urlpatterns = patterns('',
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
)
|
from django.conf.urls import url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
urlpatterns = [
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
]
|
Fix Django 1.9 warnings about patterns('', ..)
|
Fix Django 1.9 warnings about patterns('', ..)
|
Python
|
apache-2.0
|
edoburu/django-fluent-faq,edoburu/django-fluent-faq
|
---
+++
@@ -1,8 +1,8 @@
-from django.conf.urls import patterns, url
+from django.conf.urls import url
from .views import FaqQuestionList, FaqCategoryDetail, FaqQuestionDetail
-urlpatterns = patterns('',
+urlpatterns = [
url(r'^$', FaqQuestionList.as_view(), name='faqquestion_index'),
url(r'^(?P<slug>[^/]+)/$', FaqCategoryDetail.as_view(), name='faqcategory_detail'),
url(r'^(?P<cat_slug>[^/]+)/(?P<slug>[^/]+)/$', FaqQuestionDetail.as_view(), name='faqquestion_detail'),
-)
+]
|
f113123a3f31e176ae7165f1ca11118dc00625a3
|
tests/test_backend_forms.py
|
tests/test_backend_forms.py
|
import floppyforms.__future__ as floppyforms
from django.test import TestCase
from django_backend.backend.base.forms import BaseBackendForm
from .models import OneFieldModel
class OneFieldForm(BaseBackendForm):
class Meta:
model = OneFieldModel
exclude = ()
class BaseBackendFormTests(TestCase):
def test_has_superform_metaclass(self):
from django_superform.forms import SuperModelFormMetaclass
self.assertTrue(
issubclass(BaseBackendForm.__metaclass__, SuperModelFormMetaclass))
def test_has_floppyforms_metaclass(self):
from floppyforms.__future__.models import ModelFormMetaclass
self.assertTrue(
issubclass(BaseBackendForm.__metaclass__, ModelFormMetaclass))
def test_model_field_is_using_floppyforms_widget(self):
form = OneFieldForm()
self.assertTrue(
isinstance(form.fields['chars'].widget, floppyforms.TextInput))
|
import floppyforms.__future__ as floppyforms
from django.test import TestCase
from django_backend.forms import BaseBackendForm
from .models import OneFieldModel
class OneFieldForm(BaseBackendForm):
class Meta:
model = OneFieldModel
exclude = ()
class BaseBackendFormTests(TestCase):
def test_has_superform_metaclass(self):
from django_superform.forms import SuperModelFormMetaclass
self.assertTrue(
issubclass(BaseBackendForm.__metaclass__, SuperModelFormMetaclass))
def test_has_floppyforms_metaclass(self):
from floppyforms.__future__.models import ModelFormMetaclass
self.assertTrue(
issubclass(BaseBackendForm.__metaclass__, ModelFormMetaclass))
def test_model_field_is_using_floppyforms_widget(self):
form = OneFieldForm()
self.assertTrue(
isinstance(form.fields['chars'].widget, floppyforms.TextInput))
|
Fix forms import in tests
|
Fix forms import in tests
|
Python
|
bsd-3-clause
|
team23/django_backend,team23/django_backend,team23/django_backend,team23/django_backend,team23/django_backend
|
---
+++
@@ -1,7 +1,7 @@
import floppyforms.__future__ as floppyforms
from django.test import TestCase
-from django_backend.backend.base.forms import BaseBackendForm
+from django_backend.forms import BaseBackendForm
from .models import OneFieldModel
|
e291ae29926a3cd05c9268c625e14d205638dfe8
|
jarn/mkrelease/scp.py
|
jarn/mkrelease/scp.py
|
import tempfile
import tee
from process import Process
from exit import err_exit
class SCP(object):
"""Secure copy and FTP abstraction."""
def __init__(self, process=None):
self.process = process or Process()
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'scp-ing to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
def run_sftp(self, distfile, location):
if not self.process.quiet:
print 'sftp-ing to %(location)s' % locals()
with tempfile.NamedTemporaryFile(prefix='sftp-') as file:
file.write('put "%(distfile)s"\n' % locals())
file.write('quit\n')
file.flush()
cmdfile = file.name
rc, lines = self.process.popen(
'sftp -b "%(cmdfile)s" "%(location)s"' % locals(),
echo=tee.StartsWith('Uploading'))
if rc != 0:
err_exit('sftp failed')
return rc
|
import tempfile
import tee
from os.path import split
from process import Process
from chdir import ChdirStack
from exit import err_exit
class SCP(object):
"""Secure copy and FTP abstraction."""
def __init__(self, process=None):
self.process = process or Process()
self.dirstack = ChdirStack()
def run_scp(self, distfile, location):
if not self.process.quiet:
print 'scp-ing to %(location)s' % locals()
rc = self.process.os_system(
'scp "%(distfile)s" "%(location)s"' % locals())
if rc != 0:
err_exit('scp failed')
return rc
def run_sftp(self, distfile, location):
if not self.process.quiet:
print 'sftp-ing to %(location)s' % locals()
dir, distfile = split(distfile)
self.dirstack.push(dir)
try:
with tempfile.NamedTemporaryFile(prefix='sftp-') as file:
file.write('put "%(distfile)s"\n' % locals())
file.write('bye\n')
file.flush()
cmdfile = file.name
rc, lines = self.process.popen(
'sftp -b "%(cmdfile)s" "%(location)s"' % locals(),
echo=tee.StartsWith('Uploading'))
if rc != 0:
err_exit('sftp failed')
return rc
finally:
self.dirstack.pop()
|
Change to dist dir before uploading to lose the absolute path.
|
Change to dist dir before uploading to lose the absolute path.
|
Python
|
bsd-2-clause
|
Jarn/jarn.mkrelease
|
---
+++
@@ -1,7 +1,10 @@
import tempfile
import tee
+from os.path import split
+
from process import Process
+from chdir import ChdirStack
from exit import err_exit
@@ -10,6 +13,7 @@
def __init__(self, process=None):
self.process = process or Process()
+ self.dirstack = ChdirStack()
def run_scp(self, distfile, location):
if not self.process.quiet:
@@ -23,15 +27,20 @@
def run_sftp(self, distfile, location):
if not self.process.quiet:
print 'sftp-ing to %(location)s' % locals()
- with tempfile.NamedTemporaryFile(prefix='sftp-') as file:
- file.write('put "%(distfile)s"\n' % locals())
- file.write('quit\n')
- file.flush()
- cmdfile = file.name
- rc, lines = self.process.popen(
- 'sftp -b "%(cmdfile)s" "%(location)s"' % locals(),
- echo=tee.StartsWith('Uploading'))
- if rc != 0:
- err_exit('sftp failed')
- return rc
+ dir, distfile = split(distfile)
+ self.dirstack.push(dir)
+ try:
+ with tempfile.NamedTemporaryFile(prefix='sftp-') as file:
+ file.write('put "%(distfile)s"\n' % locals())
+ file.write('bye\n')
+ file.flush()
+ cmdfile = file.name
+ rc, lines = self.process.popen(
+ 'sftp -b "%(cmdfile)s" "%(location)s"' % locals(),
+ echo=tee.StartsWith('Uploading'))
+ if rc != 0:
+ err_exit('sftp failed')
+ return rc
+ finally:
+ self.dirstack.pop()
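The ChdirStack push/pop around the upload exists so that sftp's put command sees a bare filename rather than an absolute path. A sketch of the same idea with a context manager (illustrative only, not the project's ChdirStack API):
# Sketch only: run a block from inside the distribution directory.
import os
from contextlib import contextmanager

@contextmanager
def chdir(path):
    old = os.getcwd()
    os.chdir(path or '.')
    try:
        yield
    finally:
        os.chdir(old)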
|
6b9ccae880e9582f38e2a8aa3c451bc6f6a88d37
|
thing/tasks/tablecleaner.py
|
thing/tasks/tablecleaner.py
|
import datetime
from celery import task
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
from django.db.models import Q
from thing.models import APIKey, TaskState
# ---------------------------------------------------------------------------
# Periodic task to perform database table cleanup
@task(name='thing.table_cleaner')
def table_cleaner():
utcnow = datetime.datetime.utcnow()
queued_timeout = utcnow - datetime.timedelta(minutes=120)
# Build a QuerySet to find broken tasks
taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout)
for ts in taskstates:
logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url)
count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE)
if count > 0:
logger.warn('[table_cleaner] Reset %d broken task(s)', count)
# Build a QuerySet to find tasks that refer to no longer existent keys
taskstates = TaskState.objects.exclude(
Q(keyid=-1)
|
Q(keyid__in=APIKey.objects.values('keyid'))
)
taskstates.delete()
# ---------------------------------------------------------------------------
|
import datetime
from celery import task
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
from django.db.models import Q
from thing.models import APIKey, TaskState
# ---------------------------------------------------------------------------
# Periodic task to perform database table cleanup
@task(name='thing.table_cleaner')
def table_cleaner():
utcnow = datetime.datetime.utcnow()
queued_timeout = utcnow - datetime.timedelta(minutes=120)
# Build a QuerySet to find broken tasks
taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout)
for ts in taskstates:
logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url)
count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE)
if count > 0:
logger.warn('[table_cleaner] Reset %d broken task(s)', count)
# Build a QuerySet to find tasks that refer to no longer existent keys
taskstates = TaskState.objects.exclude(
Q(keyid=-1)
|
Q(keyid__in=APIKey.objects.filter(valid=True).values('keyid'))
)
taskstates.delete()
# ---------------------------------------------------------------------------
|
Change thing.tasks.table_cleaner to delete TaskState objects for any invalid APIKeys
|
Change thing.tasks.table_cleaner to delete TaskState objects for any invalid APIKeys
|
Python
|
bsd-2-clause
|
madcowfred/evething,madcowfred/evething,Gillingham/evething,Gillingham/evething,cmptrgeekken/evething,madcowfred/evething,Gillingham/evething,cmptrgeekken/evething,cmptrgeekken/evething,Gillingham/evething,madcowfred/evething,cmptrgeekken/evething,cmptrgeekken/evething
|
---
+++
@@ -20,7 +20,7 @@
taskstates = TaskState.objects.filter(state=TaskState.QUEUED_STATE, mod_time__lte=queued_timeout)
for ts in taskstates:
logger.warn('[table_cleaner] Stuck task: %d | %d | %s | %s', ts.id, ts.keyid, ts.parameter, ts.url)
-
+
count = taskstates.update(mod_time=utcnow, next_time=utcnow, state=TaskState.READY_STATE)
if count > 0:
logger.warn('[table_cleaner] Reset %d broken task(s)', count)
@@ -29,7 +29,7 @@
taskstates = TaskState.objects.exclude(
Q(keyid=-1)
|
- Q(keyid__in=APIKey.objects.values('keyid'))
+ Q(keyid__in=APIKey.objects.filter(valid=True).values('keyid'))
)
taskstates.delete()
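For context, the lone | between the Q(...) lines above is Django's Q-object OR operator, not a field separator. A minimal sketch (not from the project):
# Sketch only: Q objects combined with | form an OR condition.
from django.db.models import Q

cond = Q(keyid=-1) | Q(keyid__in=[1, 2, 3])
# TaskState.objects.exclude(cond) would drop rows matching either branch.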
|
e1c970d76dbd0eb631e726e101b09c0f5e5599ec
|
doc/api_changes/2015-04-27-core.py
|
doc/api_changes/2015-04-27-core.py
|
Grid-building functions
-----------------------
:func:`pixels_to_radius` and :func:`pixels_to_phi` were renamed to
:func:`radial_grid` and :func:`angle_grid` respectively. The name and order
of their arguments was also changed: see their docstring or API docs for
details.
|
Grid-building functions
-----------------------
:func:`pixels_to_radius` and :func:`pixels_to_phi` were renamed to
:func:`radial_grid` and :func:`angle_grid` respectively. The name and order
of their arguments was also changed: see their docstring or API docs for
details. Importantly, the orientation of the output of angle grid has been
changed. Again, see the docstring for details.
|
Add change in angle_grid orientation to API changes.
|
DOC: Add change in angle_grid orientation to API changes.
|
Python
|
bsd-3-clause
|
licode/scikit-xray,Nikea/scikit-xray,hainm/scikit-xray,celiafish/scikit-xray,licode/scikit-beam,CJ-Wright/scikit-beam,CJ-Wright/scikit-beam,tacaswell/scikit-xray,giltis/scikit-xray,ericdill/scikit-xray,scikit-xray/scikit-xray,yugangzhang/scikit-beam,scikit-xray/scikit-xray,giltis/scikit-xray,danielballan/scikit-xray,tacaswell/scikit-beam,tacaswell/scikit-xray,licode/scikit-beam,danielballan/scikit-xray,celiafish/scikit-xray,Nikea/scikit-xray,Nikea/scikit-xray,celiafish/scikit-xray,yugangzhang/scikit-beam,licode/scikit-xray,licode/scikit-beam,danielballan/scikit-xray,hainm/scikit-xray,hainm/scikit-xray,yugangzhang/scikit-beam,tacaswell/scikit-xray,giltis/scikit-xray,licode/scikit-xray,tacaswell/scikit-beam,ericdill/scikit-xray,tacaswell/scikit-beam,scikit-xray/scikit-xray,CJ-Wright/scikit-beam,ericdill/scikit-xray
|
---
+++
@@ -4,4 +4,5 @@
:func:`pixels_to_radius` and :func:`pixels_to_phi` were renamed to
:func:`radial_grid` and :func:`angle_grid` respectively. The name and order
of their arguments was also changed: see their docstring or API docs for
-details.
+details. Importantly, the orientation of the output of angle grid has been
+changed. Again, see the docstring for details.
|
6974cba56413527c8b7cef9e4b6ad6ca9fe5049e
|
tests/test_memory.py
|
tests/test_memory.py
|
# coding: utf-8
from unittest import TestCase
from chipy8 import Memory
class TestMemory(TestCase):
def setUp(self):
self.memory = Memory()
def test_write(self):
'Write a byte to memory then read it.'
address = 0x200
self.memory.write_byte(0x200, 0x01)
self.assertEqual(0x01, self.memory.read_byte(0x200))
def test_load(self):
'Load a stream of bytes to memory starting on an address.'
address = 0x200
self.memory.load(0x200, [0x01, 0x02, 0x03])
self.assertEqual(0x01, self.memory.read_byte(address))
self.assertEqual(0x02, self.memory.read_byte(address + 1))
self.assertEqual(0x03, self.memory.read_byte(address + 2))
|
# coding: utf-8
from unittest import TestCase
from chipy8 import Memory
class TestMemory(TestCase):
def setUp(self):
self.memory = Memory()
def test_write(self):
'Write a byte to memory then read it.'
address = 0x200
self.memory.write_byte(address, 0x01)
self.assertEqual(0x01, self.memory.read_byte(address))
def test_load(self):
'Load a stream of bytes to memory starting on an address.'
address = 0x200
self.memory.load(address, [0x01, 0x02, 0x03])
self.assertEqual(0x01, self.memory.read_byte(address))
self.assertEqual(0x02, self.memory.read_byte(address + 1))
self.assertEqual(0x03, self.memory.read_byte(address + 2))
|
Clarify values used in tests.
|
Clarify values used in tests.
|
Python
|
bsd-3-clause
|
gutomaia/chipy8
|
---
+++
@@ -10,13 +10,13 @@
def test_write(self):
'Write a byte to memory then read it.'
address = 0x200
- self.memory.write_byte(0x200, 0x01)
- self.assertEqual(0x01, self.memory.read_byte(0x200))
+ self.memory.write_byte(address, 0x01)
+ self.assertEqual(0x01, self.memory.read_byte(address))
def test_load(self):
'Load a stream of bytes to memory starting on an address.'
address = 0x200
- self.memory.load(0x200, [0x01, 0x02, 0x03])
+ self.memory.load(address, [0x01, 0x02, 0x03])
self.assertEqual(0x01, self.memory.read_byte(address))
self.assertEqual(0x02, self.memory.read_byte(address + 1))
self.assertEqual(0x03, self.memory.read_byte(address + 2))
|
ff489b1541f896025a0c630be6abe2d23843ec36
|
examples/05_alternative_language.py
|
examples/05_alternative_language.py
|
#!/usr/bin/env python
from pyhmsa.datafile import DataFile
from pyhmsa.type.language import langstr
datafile = DataFile()
author = langstr('Fyodor Dostoyevsky', {'ru': u'Фёдор Миха́йлович Достое́вский'})
datafile.header.author = author
print(datafile.header.author.alternatives['ru']) # Returns ...
|
#!/usr/bin/env python
from pyhmsa.datafile import DataFile
from pyhmsa.type.language import langstr
datafile = DataFile()
author = langstr('Wilhelm Conrad Roentgen', {'de': u'Wilhelm Conrad Röntgen'})
datafile.header.author = author
print(datafile.header.author.alternatives['de']) # Returns ...
|
Replace name in alternative language to prevent compilation problems with LaTeX
|
Replace name in alternative language to prevent compilation problems
with LaTeX
|
Python
|
mit
|
pyhmsa/pyhmsa
|
---
+++
@@ -4,7 +4,7 @@
from pyhmsa.type.language import langstr
datafile = DataFile()
-author = langstr('Fyodor Dostoyevsky', {'ru': u'Фёдор Миха́йлович Достое́вский'})
+author = langstr('Wilhelm Conrad Roentgen', {'de': u'Wilhelm Conrad Röntgen'})
datafile.header.author = author
-print(datafile.header.author.alternatives['ru']) # Returns ...
+print(datafile.header.author.alternatives['de']) # Returns ...
|
61fecbed71129228e7020a9e95dbcd2487bbdbb3
|
turbustat/tests/test_scf.py
|
turbustat/tests/test_scf.py
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for SCF
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import SCF, SCF_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testSCF(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_SCF_method(self):
self.tester = SCF(dataset1["cube"], size=11)
self.tester.run()
assert np.allclose(self.tester.scf_surface, computed_data['scf_val'])
def test_SCF_distance(self):
self.tester_dist = \
SCF_Distance(dataset1["cube"],
dataset2["cube"], size=11).distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['scf_distance'])
|
# Licensed under an MIT open source license - see LICENSE
'''
Test functions for SCF
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from scipy.ndimage import zoom
from ..statistics import SCF, SCF_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testSCF(TestCase):
def test_SCF_method(self):
self.tester = SCF(dataset1["cube"], size=11)
self.tester.run()
assert np.allclose(self.tester.scf_surface, computed_data['scf_val'])
def test_SCF_distance(self):
self.tester_dist = \
SCF_Distance(dataset1["cube"],
dataset2["cube"], size=11).distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['scf_distance'])
def test_SCF_regrid_distance(self):
hdr = dataset1["cube"][1].copy()
hdr["CDELT2"] = 0.5 * hdr["CDELT2"]
hdr["CDELT1"] = 0.5 * hdr["CDELT1"]
cube = zoom(dataset1["cube"][0], (1, 2, 2))
self.tester_dist_zoom = \
SCF_Distance([cube, hdr], dataset1["cube"],
size=11).distance_metric(verbose=True)
fid_dist = 0.02
assert self.tester_dist_zoom < fid_dist
|
Add simple SCF test for unequal grids
|
Add simple SCF test for unequal grids
|
Python
|
mit
|
e-koch/TurbuStat,Astroua/TurbuStat
|
---
+++
@@ -9,6 +9,7 @@
import numpy as np
import numpy.testing as npt
+from scipy.ndimage import zoom
from ..statistics import SCF, SCF_Distance
from ._testing_data import \
@@ -16,10 +17,6 @@
class testSCF(TestCase):
-
- def setUp(self):
- self.dataset1 = dataset1
- self.dataset2 = dataset2
def test_SCF_method(self):
self.tester = SCF(dataset1["cube"], size=11)
@@ -33,3 +30,17 @@
dataset2["cube"], size=11).distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['scf_distance'])
+
+ def test_SCF_regrid_distance(self):
+ hdr = dataset1["cube"][1].copy()
+ hdr["CDELT2"] = 0.5 * hdr["CDELT2"]
+ hdr["CDELT1"] = 0.5 * hdr["CDELT1"]
+ cube = zoom(dataset1["cube"][0], (1, 2, 2))
+
+ self.tester_dist_zoom = \
+ SCF_Distance([cube, hdr], dataset1["cube"],
+ size=11).distance_metric(verbose=True)
+
+ fid_dist = 0.02
+
+ assert self.tester_dist_zoom < fid_dist
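The zoom factors used in the new test leave the spectral axis untouched and double both spatial axes; a small sketch of that behaviour (not part of the test suite):
# Sketch only: (1, 2, 2) keeps axis 0 and doubles axes 1 and 2 of a (v, y, x) cube.
import numpy as np
from scipy.ndimage import zoom

cube = np.random.rand(4, 8, 8)
print(zoom(cube, (1, 2, 2)).shape)   # (4, 16, 16)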
|
698677a623722b63ec4cceb7690b62fa7e4ede37
|
django_prices_openexchangerates/templatetags/prices_multicurrency_i18n.py
|
django_prices_openexchangerates/templatetags/prices_multicurrency_i18n.py
|
from django.template import Library
from django_prices.templatetags import prices_i18n
from .. import exchange_currency
register = Library()
@register.simple_tag # noqa
def gross_in_currency(price, currency): # noqa
converted_price = exchange_currency(price, currency)
return prices_i18n.gross(converted_price)
@register.simple_tag # noqa
def net_in_currency(price, currency): # noqa
converted_price = exchange_currency(price, currency)
return prices_i18n.net(converted_price)
@register.simple_tag # noqa
def tax_in_currency(price, currency): # noqa
converted_price = exchange_currency(price, currency)
return prices_i18n.tax(converted_price)
|
from django.template import Library
from django_prices.templatetags import prices_i18n
from .. import exchange_currency
register = Library()
@register.simple_tag # noqa
def gross_in_currency(price, currency): # noqa
converted_price = exchange_currency(price, currency)
return prices_i18n.gross(converted_price)
@register.simple_tag # noqa
def net_in_currency(price, currency): # noqa
converted_price = exchange_currency(price, currency)
return prices_i18n.net(converted_price)
@register.simple_tag # noqa
def tax_in_currency(price, currency): # noqa
converted_price = exchange_currency(price, currency)
return prices_i18n.tax(converted_price)
@register.simple_tag
def discount_amount_in_currency(discount, price, currency):
price = exchange_currency(price, to_currency=currency)
discount_amount = exchange_currency(discount.amount, to_currency=currency)
discount.amount = discount_amount
return (price | discount) - price
|
Add templatetag for converting discounts between currencies
|
Add templatetag for converting discounts between currencies
|
Python
|
bsd-3-clause
|
mirumee/django-prices-openexchangerates,artursmet/django-prices-openexchangerates
|
---
+++
@@ -23,3 +23,10 @@
converted_price = exchange_currency(price, currency)
return prices_i18n.tax(converted_price)
+
+@register.simple_tag
+def discount_amount_in_currency(discount, price, currency):
+ price = exchange_currency(price, to_currency=currency)
+ discount_amount = exchange_currency(discount.amount, to_currency=currency)
+ discount.amount = discount_amount
+ return (price | discount) - price
|
32a54ff0588930efc5e0ee3c61f2efbf57e450e0
|
inviter/tests.py
|
inviter/tests.py
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.contrib.auth.models import User
from django.core.mail import get_connection
from django.test import TestCase
from inviter.utils import invite
import shortuuid
class InviteTest(TestCase):
def setUp(self):
self.inviter = User.objects.create(username = shortuuid.uuid())
self.existing = User.objects.create(username = shortuuid.uuid(),
email = 'existing@example.com')
self.backend = get_connection('django.core.mail.backends.locmem.EmailBackend')
def test_inviting(self):
user = invite("foo@example.com", self.inviter)
self.assertFalse(user.is_active)
self.assertEqual(1, len(self.backend.emails))
self.assertEqual(3, User.objects.count())
user = invite("foo@example.com", self.inviter)
self.assertFalse(user.is_active)
self.assertEqual(2, len(self.backend.emails))
self.assertEqual(3, User.objects.count())
user = invite("existing@example.com", self.inviter)
self.assertTrue(user.is_active)
self.assertEqual(2, len(self.backend.emails))
self.assertEqual(3, User.objects.count())
|
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.contrib.auth.models import User
from django.core.mail import outbox
from django.test import TestCase
from inviter.utils import invite
import shortuuid
class InviteTest(TestCase):
def setUp(self):
self.inviter = User.objects.create(username = shortuuid.uuid())
self.existing = User.objects.create(username = shortuuid.uuid(),
email = 'existing@example.com')
self.outbox = outbox
def test_inviting(self):
user = invite("foo@example.com", self.inviter)
self.assertFalse(user.is_active)
self.assertEqual(1, len(self.outbox))
self.assertEqual(3, User.objects.count())
user = invite("foo@example.com", self.inviter)
self.assertFalse(user.is_active)
self.assertEqual(2, len(self.outbox))
self.assertEqual(3, User.objects.count())
user = invite("existing@example.com", self.inviter)
self.assertTrue(user.is_active)
self.assertEqual(2, len(self.outbox))
self.assertEqual(3, User.objects.count())
|
Test fix to check django.core.mail.outbox
|
Test fix to check django.core.mail.outbox
|
Python
|
mit
|
caffeinehit/django-inviter
|
---
+++
@@ -5,7 +5,7 @@
Replace this with more appropriate tests for your application.
"""
from django.contrib.auth.models import User
-from django.core.mail import get_connection
+from django.core.mail import outbox
from django.test import TestCase
from inviter.utils import invite
import shortuuid
@@ -16,22 +16,22 @@
self.inviter = User.objects.create(username = shortuuid.uuid())
self.existing = User.objects.create(username = shortuuid.uuid(),
email = 'existing@example.com')
- self.backend = get_connection('django.core.mail.backends.locmem.EmailBackend')
+ self.outbox = outbox
def test_inviting(self):
user = invite("foo@example.com", self.inviter)
self.assertFalse(user.is_active)
- self.assertEqual(1, len(self.backend.emails))
+ self.assertEqual(1, len(self.outbox))
self.assertEqual(3, User.objects.count())
user = invite("foo@example.com", self.inviter)
self.assertFalse(user.is_active)
- self.assertEqual(2, len(self.backend.emails))
+ self.assertEqual(2, len(self.outbox))
self.assertEqual(3, User.objects.count())
user = invite("existing@example.com", self.inviter)
self.assertTrue(user.is_active)
- self.assertEqual(2, len(self.backend.emails))
+ self.assertEqual(2, len(self.outbox))
self.assertEqual(3, User.objects.count())
|
9d6c8eaa491d0988bf16633bbba9847350f57778
|
spacy/lang/norm_exceptions.py
|
spacy/lang/norm_exceptions.py
|
# coding: utf8
from __future__ import unicode_literals
# These exceptions are used to add NORM values based on a token's ORTH value.
# Individual languages can also add their own exceptions and overwrite them -
# for example, British vs. American spelling in English.
# Norms are only set if no alternative is provided in the tokenizer exceptions.
# Note that this does not change any other token attributes. Its main purpose
# is to normalise the word representations so that equivalent tokens receive
# similar representations. For example: $ and € are very different, but they're
# both currency symbols. By normalising currency symbols to $, all symbols are
# seen as similar, no matter how common they are in the training data.
BASE_NORMS = {
"'s": "'s",
"'S": "'s",
"βs": "'s",
"βS": "'s",
"β": "'",
"β": "'",
"Β΄": "'",
"`": "'",
"β": '"',
"β": '"',
"''": '"',
"``": '"',
"´´": '"',
"β": '"',
"Β»": '"',
"Β«": '"',
"β¦": "...",
"β": "-",
"β": "-",
"--": "-",
"---": "-",
"β¬": "$",
"Β£": "$",
"Β₯": "$",
"ΰΈΏ": "$",
"US$": "$",
"C$": "$",
"A$": "$"
}
|
# coding: utf8
from __future__ import unicode_literals
# These exceptions are used to add NORM values based on a token's ORTH value.
# Individual languages can also add their own exceptions and overwrite them -
# for example, British vs. American spelling in English.
# Norms are only set if no alternative is provided in the tokenizer exceptions.
# Note that this does not change any other token attributes. Its main purpose
# is to normalise the word representations so that equivalent tokens receive
# similar representations. For example: $ and € are very different, but they're
# both currency symbols. By normalising currency symbols to $, all symbols are
# seen as similar, no matter how common they are in the training data.
BASE_NORMS = {
"'s": "'s",
"'S": "'s",
"βs": "'s",
"βS": "'s",
"β": "'",
"β": "'",
"Β΄": "'",
"`": "'",
"β": '"',
"β": '"',
"''": '"',
"``": '"',
"´´": '"',
"β": '"',
"Β»": '"',
"Β«": '"',
"ββ": '"',
"ββ": '"',
"οΌ": "?",
"οΌ": "!",
"οΌ": ",",
"οΌ": ";",
"οΌ": ":",
"γ": ".",
"ΰ₯€": ".",
"β¦": "...",
"β": "-",
"β": "-",
"--": "-",
"---": "-",
"ββ": "-",
"β¬": "$",
"Β£": "$",
"Β₯": "$",
"ΰΈΏ": "$",
"US$": "$",
"C$": "$",
"A$": "$"
}
|
Update base norm exceptions with more unicode characters
|
Update base norm exceptions with more unicode characters
e.g. unicode variations of punctuation used in Chinese
|
Python
|
mit
|
aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy
|
---
+++
@@ -31,11 +31,21 @@
"β": '"',
"Β»": '"',
"Β«": '"',
+ "ββ": '"',
+ "ββ": '"',
+ "οΌ": "?",
+ "οΌ": "!",
+ "οΌ": ",",
+ "οΌ": ";",
+ "οΌ": ":",
+ "γ": ".",
+ "ΰ₯€": ".",
"β¦": "...",
"β": "-",
"β": "-",
"--": "-",
"---": "-",
+ "ββ": "-",
"β¬": "$",
"Β£": "$",
"Β₯": "$",
|
56c4de583847be5fb16818dcd1ca855fc6007b50
|
pylsdj/__init__.py
|
pylsdj/__init__.py
|
__title__ = 'pylsdj'
__version__ = '1.1.0'
__build__ = 0x010100
__author__ = 'Alex Rasmussen'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Alex Rasmussen'
import bread_spec
import chain
import clock
import consts
import filepack
from instrument import Instrument
from phrase import Phrase
from project import load_lsdsng, load_srm, Project
from savfile import SAVFile
from song import Song, Sequence
from speech_instrument import Word, SpeechInstrument
from synth import Synth
from table import Table
|
Make imports a little more intuitive.
|
Make imports a little more intuitive.
|
Python
|
mit
|
alexras/pylsdj,alexras/pylsdj
|
---
+++
@@ -0,0 +1,20 @@
+__title__ = 'pylsdj'
+__version__ = '1.1.0'
+__build__ = 0x010100
+__author__ = 'Alex Rasmussen'
+__license__ = 'MIT'
+__copyright__ = 'Copyright 2014 Alex Rasmussen'
+
+import bread_spec
+import chain
+import clock
+import consts
+import filepack
+from instrument import Instrument
+from phrase import Phrase
+from project import load_lsdsng, load_srm, Project
+from savfile import SAVFile
+from song import Song, Sequence
+from speech_instrument import Word, SpeechInstrument
+from synth import Synth
+from table import Table
|
|
770fd77e0fc7a0700b81e4418e5f97bd88d842d0
|
pymoira/filesys.py
|
pymoira/filesys.py
|
#
## PyMoira client library
##
## This file contains the more abstract methods which allow user to work with
## lists and list members.
#
import protocol
import utils
import datetime
from errors import *
class Filesys(object):
info_query_description = (
('label', str),
('type', str),
('machine', str),
('name', str),
('mountpoint', str),
('access_mode', str),
('description', str),
('owner_user', str),
('owner_group', str),
('create', bool),
('type', str),
('lastmod_datetime', datetime.datetime),
('lastmod_by', str),
('lastmod_with', str),
)
def __init__(self, client, name):
self.client = client
self.name = name
def loadInfo(self):
"""Loads the information about the list from the server into the object."""
response, = self.client.query( 'get_filesys_by_label', (self.name, ), version = 14 )
result = utils.responseToDict(self.info_query_description, response)
self.__dict__.update(result)
|
#
## PyMoira client library
##
## This file contains the more abstract methods which allow user to work with
## lists and list members.
#
import protocol
import utils
import datetime
from errors import *
class Filesys(object):
info_query_description = (
('label', str),
('type', str),
('machine', str),
('name', str),
('mountpoint', str),
('access_mode', str),
('description', str),
('owner_user', str),
('owner_group', str),
('create', bool),
('locker_type', str),
('lastmod_datetime', datetime.datetime),
('lastmod_by', str),
('lastmod_with', str),
)
def __init__(self, client, name):
self.client = client
self.name = name
def loadInfo(self):
"""Loads the information about the list from the server into the object."""
response, = self.client.query( 'get_filesys_by_label', (self.name, ), version = 14 )
result = utils.responseToDict(self.info_query_description, response)
self.__dict__.update(result)
|
Fix a name collision for two types of types.
|
Fix a name collision for two types of types.
|
Python
|
mit
|
vasilvv/pymoira
|
---
+++
@@ -22,7 +22,7 @@
('owner_user', str),
('owner_group', str),
('create', bool),
- ('type', str),
+ ('locker_type', str),
('lastmod_datetime', datetime.datetime),
('lastmod_by', str),
('lastmod_with', str),
|
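The renamed field matters because the query description above is zipped against the server response to build a dict, so two entries both named 'type' leave only the last value. A toy sketch of that failure mode (assuming utils.responseToDict behaves roughly like dict(zip(...)), which is not shown in this record):
description = (('label', str), ('type', str), ('type', str))
response = ('mylocker', 'AFS', 'PROJECT')

# Building a dict from duplicate keys silently drops the first 'type'.
collided = dict(zip((name for name, _ in description), response))
assert collided == {'label': 'mylocker', 'type': 'PROJECT'}

# Renaming the second field, as the commit does, keeps both values.
fixed_description = (('label', str), ('type', str), ('locker_type', str))
fixed = dict(zip((name for name, _ in fixed_description), response))
assert fixed == {'label': 'mylocker', 'type': 'AFS', 'locker_type': 'PROJECT'}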
7394ba6eba50282bd7252e504a80e5d595dd12bc
|
ci/fix_paths.py
|
ci/fix_paths.py
|
import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True)
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
# HDF5_DIR is not set when we're testing wheels; these should already have
# the necessary libraries bundled in.
if platform.startswith('win') and hdf5_path is not None:
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
|
import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True)
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
# HDF5_DIR is not set when we're testing wheels; these should already have
# the necessary libraries bundled in.
if platform.startswith('win') and hdf5_path is not None:
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
zlib_root = os.environ.get("ZLIB_ROOT")
if zlib_root:
f = pjoin(zlib_root, 'bin_release', 'zlib.dll')
copy(f, pjoin(sitepackagesdir, 'h5py', 'zlib.dll'))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
|
Copy zlib.dll into Windows h5py installed from source
|
Copy zlib.dll into Windows h5py installed from source
|
Python
|
bsd-3-clause
|
h5py/h5py,h5py/h5py,h5py/h5py
|
---
+++
@@ -23,6 +23,12 @@
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
+ zlib_root = os.environ.get("ZLIB_ROOT")
+ if zlib_root:
+ f = pjoin(zlib_root, 'bin_release', 'zlib.dll')
+ copy(f, pjoin(sitepackagesdir, 'h5py', 'zlib.dll'))
+ print("Copied", f)
+
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
|
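A side note on the site-packages lookup used above: distutils.sysconfig.get_python_lib() is the call Tox also used at the time, but on newer interpreters where distutils is deprecated the stdlib sysconfig module resolves the same directories. A minimal sketch of that alternative, offered only as an aside and not part of the CI script:
import sysconfig

# 'purelib' and 'platlib' are the site-packages locations for the running interpreter.
paths = sysconfig.get_paths()
print("site-packages (pure):", paths['purelib'])
print("site-packages (plat):", paths['platlib'])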
6db2bb9b1634a7b37790207e5b8d420de643a9cb
|
turbasen/__init__.py
|
turbasen/__init__.py
|
VERSION = '1.0.0'
def configure(**settings):
from .settings import Settings
for key, value in settings.items():
Settings.setattr(key, value)
|
VERSION = '1.0.0'
from .models import \
Omrade, \
Sted
def configure(**settings):
from .settings import Settings
for key, value in settings.items():
Settings.setattr(key, value)
|
Add relevant models to turbasen module
|
Add relevant models to turbasen module
|
Python
|
mit
|
Turbasen/turbasen.py
|
---
+++
@@ -1,4 +1,8 @@
VERSION = '1.0.0'
+
+from .models import \
+ Omrade, \
+ Sted
def configure(**settings):
from .settings import Settings
|
e8b1cee54f679cbd6a2d158b3c2789f3f6a3d9c0
|
uppercase.py
|
uppercase.py
|
from twisted.internet import protocol, reactor
factory = protocol.ServerFactory()
factory.protocol = protocol.Protocol
reactor.listenTCP(8000, factory)
reactor.run()
|
from twisted.internet import endpoints, protocol, reactor
class UpperProtocol(protocol.Protocol):
pass
factory = protocol.ServerFactory()
factory.protocol = UpperProtocol
endpoints.serverFromString(reactor, 'tcp:8000').listen(factory)
reactor.run()
|
Convert to endpoints API and use custom protocol class
|
Convert to endpoints API and use custom protocol class
|
Python
|
mit
|
cataliniacob/ep2012-tutorial-twisted
|
---
+++
@@ -1,7 +1,10 @@
-from twisted.internet import protocol, reactor
+from twisted.internet import endpoints, protocol, reactor
+
+class UpperProtocol(protocol.Protocol):
+ pass
factory = protocol.ServerFactory()
-factory.protocol = protocol.Protocol
+factory.protocol = UpperProtocol
-reactor.listenTCP(8000, factory)
+endpoints.serverFromString(reactor, 'tcp:8000').listen(factory)
reactor.run()
|
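The commit above only introduces UpperProtocol as an empty subclass; the uppercasing itself is presumably a later tutorial step. A hedged sketch of what that step might look like — dataReceived and transport.write are standard twisted.internet.protocol.Protocol APIs, but the method body is an assumption, not part of the recorded change:
from twisted.internet import endpoints, protocol, reactor

class UpperProtocol(protocol.Protocol):
    # Hypothetical next step: echo every received chunk back, uppercased.
    def dataReceived(self, data):
        self.transport.write(data.upper())

factory = protocol.ServerFactory()
factory.protocol = UpperProtocol
endpoints.serverFromString(reactor, 'tcp:8000').listen(factory)
reactor.run()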
6b179dc4fb95f4db380b9156381b6210adeef2e5
|
conftest.py
|
conftest.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import mock
import pytest
PROJECT = os.environ['GCLOUD_PROJECT']
@pytest.fixture
def api_client_inject_project_id():
"""Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with
the project ID."""
import googleapiclient.http
old_execute = googleapiclient.http.HttpRequest.execute
def new_execute(self, http=None, num_retries=0):
self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT)
return old_execute(self, http=http, num_retries=num_retries)
with mock.patch(
'googleapiclient.http.HttpRequest.execute',
new=new_execute):
yield
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import mock
import pytest
PROJECT = 'python-docs-samples'
@pytest.fixture
def api_client_inject_project_id():
"""Patches all googleapiclient requests to replace 'YOUR_PROJECT_ID' with
the project ID."""
import googleapiclient.http
old_execute = googleapiclient.http.HttpRequest.execute
def new_execute(self, http=None, num_retries=0):
self.uri = self.uri.replace('YOUR_PROJECT_ID', PROJECT)
return old_execute(self, http=http, num_retries=num_retries)
with mock.patch(
'googleapiclient.http.HttpRequest.execute',
new=new_execute):
yield
|
Set the Project in code
|
Set the Project in code
|
Python
|
apache-2.0
|
GoogleCloudPlatform/getting-started-python,GoogleCloudPlatform/getting-started-python,GoogleCloudPlatform/getting-started-python
|
---
+++
@@ -17,7 +17,7 @@
import mock
import pytest
-PROJECT = os.environ['GCLOUD_PROJECT']
+PROJECT = 'python-docs-samples'
@pytest.fixture
|
378f3bf0bb2e05260b7cbeeb4a4637d7d3a7ca7c
|
workflows/consumers.py
|
workflows/consumers.py
|
from urllib.parse import parse_qs
from channels import Group
from channels.sessions import channel_session
@channel_session
def ws_add(message):
message.reply_channel.send({"accept": True})
qs = parse_qs(message['query_string'])
workflow_pk = qs['workflow_pk'][0]
message.channel_session['workflow_pk'] = workflow_pk
Group("workflow-{}".format(workflow_pk)).add(message.reply_channel)
@channel_session
def ws_disconnect(message):
workflow_pk = message.channel_session['workflow_pk']
Group("workflow-{}".format(workflow_pk)).discard(message.reply_channel)
|
from urllib.parse import parse_qs
from channels import Group
from channels.sessions import channel_session
@channel_session
def ws_add(message):
message.reply_channel.send({"accept": True})
qs = parse_qs(message['query_string'])
workflow_pk = qs[b'workflow_pk'][0].decode('utf-8')
message.channel_session['workflow_pk'] = workflow_pk
Group("workflow-{}".format(workflow_pk)).add(message.reply_channel)
@channel_session
def ws_disconnect(message):
workflow_pk = message.channel_session['workflow_pk']
Group("workflow-{}".format(workflow_pk)).discard(message.reply_channel)
|
Fix query string problem of comparing byte strings and unicode strings in py3
|
Fix query string problem of comparing byte strings and unicode strings in py3
|
Python
|
mit
|
xflows/clowdflows-backend,xflows/clowdflows-backend,xflows/clowdflows-backend,xflows/clowdflows-backend
|
---
+++
@@ -7,7 +7,7 @@
def ws_add(message):
message.reply_channel.send({"accept": True})
qs = parse_qs(message['query_string'])
- workflow_pk = qs['workflow_pk'][0]
+ workflow_pk = qs[b'workflow_pk'][0].decode('utf-8')
message.channel_session['workflow_pk'] = workflow_pk
Group("workflow-{}".format(workflow_pk)).add(message.reply_channel)
|
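The one-character fix above comes down to a Python 3 detail: urllib.parse.parse_qs applied to a bytes query string returns bytes keys and bytes values, so the old str key never matched. A minimal standalone check of that behavior:
from urllib.parse import parse_qs

# The query string arrives as bytes here, and parse_qs keeps it that way.
qs = parse_qs(b"workflow_pk=42&foo=bar")
assert qs[b"workflow_pk"] == [b"42"]

# Decoding recovers a plain str for the later "workflow-{}".format(...) call.
workflow_pk = qs[b"workflow_pk"][0].decode("utf-8")
assert workflow_pk == "42"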
9a7c80744bc1e57fe0ec5fc7cf149dada2d05121
|
neuroimaging/utils/tests/data/__init__.py
|
neuroimaging/utils/tests/data/__init__.py
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
"""
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
# Fernando pointed out that we should wrap the test data into a
# tarball and write a pure python function to grab the data for people
# instead of using svn. Users may not have svn and the mkdir may not
# work on Windows.
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
msg = 'Nipy data directory is not found!\n%s' % __doc__
raise IOError(msg)
repository = Repository(datapath)
|
Extend error message regarding missing test data.
|
Extend error message regarding missing test data.
|
Python
|
bsd-3-clause
|
alexis-roche/register,bthirion/nipy,bthirion/nipy,nipy/nipy-labs,alexis-roche/nipy,bthirion/nipy,alexis-roche/register,alexis-roche/nireg,bthirion/nipy,arokem/nipy,alexis-roche/register,alexis-roche/niseg,alexis-roche/nipy,arokem/nipy,nipy/nipy-labs,alexis-roche/nipy,alexis-roche/nipy,alexis-roche/nireg,arokem/nipy,arokem/nipy,nipy/nireg,nipy/nireg,alexis-roche/niseg
|
---
+++
@@ -1,5 +1,4 @@
-"""Information used for locating nipy test data.
-
+"""
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
@@ -8,6 +7,12 @@
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
+
+# Fernando pointed out that we should wrap the test data into a
+# tarball and write a pure python function to grab the data for people
+# instead of using svn. Users may not have svn and the mkdir may not
+# work on Windows.
+
from os.path import expanduser, exists, join
from neuroimaging.io.datasource import Repository
@@ -16,7 +21,7 @@
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
- raise IOError, 'Nipy data directory is not found!'
+ msg = 'Nipy data directory is not found!\n%s' % __doc__
+ raise IOError(msg)
repository = Repository(datapath)
-
|
446400fa4e40ca7e47e48dd00209d80858094552
|
buffer/managers/profiles.py
|
buffer/managers/profiles.py
|
import json
from buffer.models.profile import PATHS, Profile
class Profiles(list):
def __init__(self, api, *args, **kwargs):
super(Profiles, self).__init__(*args, **kwargs)
self.api = api
def all(self):
response = self.api.get(url=PATHS['GET_PROFILES'], parser=json.loads)
for raw_profile in response:
self.append(Profile(self.api, raw_profile))
return self
def filter(self, **kwargs):
if not len(self):
self.all()
new_list = filter(lambda item: [True for arg in kwargs if item[arg] == kwargs[arg]] != [], self)
return Profiles(self.api, new_list)
|
import json
from buffer.models.profile import PATHS, Profile
class Profiles(list):
'''
Manage profiles
+ all -> get all the profiles from buffer
+ filter -> wrapper for list filtering
'''
def __init__(self, api, *args, **kwargs):
super(Profiles, self).__init__(*args, **kwargs)
self.api = api
def all(self):
'''
Get all network profiles
'''
response = self.api.get(url=PATHS['GET_PROFILES'], parser=json.loads)
for raw_profile in response:
self.append(Profile(self.api, raw_profile))
return self
def filter(self, **kwargs):
'''
Based on some criteria, filter the profiles and return a new Profiles
Manager containing only the chosen items
If the manager doen't have any items, get all the profiles from Buffer
'''
if not len(self):
self.all()
new_list = filter(lambda item: [True for arg in kwargs if item[arg] == kwargs[arg]] != [], self)
return Profiles(self.api, new_list)
|
Write documentation for profile manager
|
Write documentation for profile manager
|
Python
|
mit
|
bufferapp/buffer-python,vtemian/buffpy
|
---
+++
@@ -3,6 +3,11 @@
from buffer.models.profile import PATHS, Profile
class Profiles(list):
+ '''
+ Manage profiles
+ + all -> get all the profiles from buffer
+ + filter -> wrapper for list filtering
+ '''
def __init__(self, api, *args, **kwargs):
super(Profiles, self).__init__(*args, **kwargs)
@@ -10,6 +15,10 @@
self.api = api
def all(self):
+ '''
+ Get all network profiles
+ '''
+
response = self.api.get(url=PATHS['GET_PROFILES'], parser=json.loads)
for raw_profile in response:
@@ -18,6 +27,12 @@
return self
def filter(self, **kwargs):
+ '''
+ Based on some criteria, filter the profiles and return a new Profiles
+ Manager containing only the chosen items
+
+ If the manager doen't have any items, get all the profiles from Buffer
+ '''
if not len(self):
self.all()
|
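One behaviour worth noting in the filter shown above: the predicate [True for arg in kwargs if item[arg] == kwargs[arg]] != [] is truthy as soon as any keyword matches, so filtering on several criteria returns profiles that satisfy at least one of them rather than all. A small standalone check, with plain dicts standing in for Profile objects:
profiles = [
    {'service': 'twitter', 'default': True},
    {'service': 'facebook', 'default': False},
]

def matches_any(item, **kwargs):
    # Same shape of predicate as Profiles.filter above.
    return [True for arg in kwargs if item[arg] == kwargs[arg]] != []

hits = [p for p in profiles if matches_any(p, service='twitter', default=False)]
# Each profile matches at least one of the two criteria, so both are kept.
assert len(hits) == 2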
1c7bbeabe1c1f3eea053c8fd8b6649ba388c1d2e
|
waliki/slides/views.py
|
waliki/slides/views.py
|
from os import path
import shutil
import tempfile
from sh import hovercraft
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from waliki.models import Page
def slides(request, slug):
page = get_object_or_404(Page, slug=slug)
outpath = tempfile.mkdtemp()
try:
infile = page.abspath
template = path.join(path.dirname(path.realpath(__file__)), 'template')
hovercraft('-t', template, infile, outpath)
with open(path.join(outpath, 'index.html')) as f:
content = f.read()
finally:
shutil.rmtree(outpath)
return HttpResponse(content)
|
from os import path
import shutil
import tempfile
from sh import hovercraft
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from waliki.models import Page
from waliki.acl import permission_required
@permission_required('view_page')
def slides(request, slug):
page = get_object_or_404(Page, slug=slug)
outpath = tempfile.mkdtemp()
try:
infile = page.abspath
template = path.join(path.dirname(path.realpath(__file__)), 'template')
hovercraft('-t', template, infile, outpath)
with open(path.join(outpath, 'index.html')) as f:
content = f.read()
finally:
shutil.rmtree(outpath)
return HttpResponse(content)
|
Add permission check to slide urls.
|
Add permission check to slide urls.
|
Python
|
bsd-3-clause
|
RobertoMaurizzi/waliki,aszepieniec/waliki,aszepieniec/waliki,OlegGirko/waliki,RobertoMaurizzi/waliki,rizotas/waliki,beres/waliki,beres/waliki,santiavenda2/waliki,mgaitan/waliki,fpytloun/waliki,OlegGirko/waliki,OlegGirko/waliki,santiavenda2/waliki,santiavenda2/waliki,beres/waliki,mgaitan/waliki,RobertoMaurizzi/waliki,aszepieniec/waliki,rizotas/waliki,mgaitan/waliki,fpytloun/waliki,fpytloun/waliki,rizotas/waliki
|
---
+++
@@ -5,8 +5,10 @@
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from waliki.models import Page
+from waliki.acl import permission_required
+@permission_required('view_page')
def slides(request, slug):
page = get_object_or_404(Page, slug=slug)
outpath = tempfile.mkdtemp()
|
ceac9c401f80a279e7291e7ba2a9e06757d4dd1d
|
buildtools/wrapper/cmake.py
|
buildtools/wrapper/cmake.py
|
import os
from buildtools.bt_logging import log
from buildtools.os_utils import cmd, ENV
class CMake(object):
def __init__(self):
self.flags = {}
self.generator = None
def setFlag(self, key, val):
log.info('CMake: {} = {}'.format(key, val))
self.flags[key] = val
def build(self, CMAKE, dir='.', env=None, target=None, moreflags=[]):
moreflags += ['--build']
if target is not None:
moreflags += ['--target',target]
self.run(CMAKE,dir,env,moreflags)
def run(self, CMAKE, env=None, dir='.', moreflags=[]):
if env is None:
env = ENV.env
flags = []
if self.generator is not None:
flags += ['-G',self.generator]
for key, value in self.flags.items():
flags += ['-D{0}={1}'.format(key, value)]
flags += moreflags
with log.info('Running CMake:'):
for key, value in env.items():
log.info('+{0}="{1}"'.format(key, value))
return cmd([CMAKE] + flags + [dir], env=env, critical=True, echo=True)
return False
|
import os
from buildtools.bt_logging import log
from buildtools.os_utils import cmd, ENV
class CMake(object):
def __init__(self):
self.flags = {}
self.generator = None
def setFlag(self, key, val):
log.info('CMake: {} = {}'.format(key, val))
self.flags[key] = val
def build(self, CMAKE, dir='.', env=None, target=None, moreflags=[]):
moreflags += ['--build']
if target is not None:
moreflags += ['--target', target]
self.run(CMAKE, env, dir, moreflags)
def run(self, CMAKE, env=None, dir='.', moreflags=[]):
if env is None:
env = ENV.env
flags = []
if self.generator is not None:
flags += ['-G', self.generator]
for key, value in self.flags.items():
flags += ['-D{0}={1}'.format(key, value)]
flags += moreflags
with log.info('Running CMake:'):
for key, value in env.items():
log.info('+{0}="{1}"'.format(key, value))
return cmd([CMAKE] + flags + [dir], env=env, critical=True, echo=True)
return False
|
Fix mismatched args in CMake.build
|
Fix mismatched args in CMake.build
|
Python
|
mit
|
N3X15/python-build-tools,N3X15/python-build-tools,N3X15/python-build-tools
|
---
+++
@@ -15,8 +15,8 @@
def build(self, CMAKE, dir='.', env=None, target=None, moreflags=[]):
moreflags += ['--build']
if target is not None:
- moreflags += ['--target',target]
- self.run(CMAKE,dir,env,moreflags)
+ moreflags += ['--target', target]
+ self.run(CMAKE, env, dir, moreflags)
def run(self, CMAKE, env=None, dir='.', moreflags=[]):
if env is None:
@@ -24,7 +24,7 @@
flags = []
if self.generator is not None:
- flags += ['-G',self.generator]
+ flags += ['-G', self.generator]
for key, value in self.flags.items():
flags += ['-D{0}={1}'.format(key, value)]
|
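The mismatch fixed above is purely positional: run is declared as run(self, CMAKE, env=None, dir='.', moreflags=[]), so the old call self.run(CMAKE, dir, env, moreflags) bound the build directory to env and the environment dict to dir. A small illustration of the pitfall with a simplified stand-in class:
class Runner:
    def run(self, cmake, env=None, dir='.', moreflags=()):
        # Return the bound values so the mix-up is easy to see.
        return {'cmake': cmake, 'env': env, 'dir': dir, 'flags': list(moreflags)}

r = Runner()

# Old argument order: the directory lands in env and the env dict in dir.
bad = r.run('cmake', 'build/', {'CC': 'gcc'}, ['--build'])
assert bad['env'] == 'build/' and bad['dir'] == {'CC': 'gcc'}

# Corrected order matches the signature.
good = r.run('cmake', {'CC': 'gcc'}, 'build/', ['--build'])
assert good['env'] == {'CC': 'gcc'} and good['dir'] == 'build/'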
f04c451de83b66b733dc28eb13bc16ade2675b3a
|
changes/api/stream.py
|
changes/api/stream.py
|
from __future__ import absolute_import
import gevent
from gevent.queue import Queue
from changes.config import pubsub
class EventStream(object):
def __init__(self, channels, pubsub=pubsub):
self.pubsub = pubsub
self.pending = Queue()
self.channels = channels
self.active = True
for channel in channels:
self.pubsub.subscribe(channel, self.push)
def __iter__(self):
while self.active:
# TODO(dcramer): figure out why we have to send this to ensure
# the connection is opened
yield "\n"
event = self.pending.get()
yield "event: {}\n".format(event['event'])
for line in event['data'].splitlines():
yield "data: {}\n".format(line)
yield "\n"
gevent.sleep(0)
def __del__(self):
self.close()
def push(self, message):
self.pending.put_nowait(message)
def close(self):
for channel in self.channels:
self.pubsub.unsubscribe(channel, self.push)
|
from __future__ import absolute_import
import gevent
from collections import deque
from changes.config import pubsub
class EventStream(object):
def __init__(self, channels, pubsub=pubsub):
self.pubsub = pubsub
self.pending = deque()
self.channels = channels
self.active = True
for channel in channels:
self.pubsub.subscribe(channel, self.push)
def __iter__(self):
while self.active:
# TODO(dcramer): figure out why we have to send this to ensure
# the connection is opened
yield "\n"
while self.pending:
event = self.pending.pop()
yield "event: {}\n".format(event['event'])
for line in event['data'].splitlines():
yield "data: {}\n".format(line)
yield "\n"
gevent.sleep(0)
gevent.sleep(0.3)
def __del__(self):
self.close()
def push(self, message):
self.pending.append(message)
def close(self):
for channel in self.channels:
self.pubsub.unsubscribe(channel, self.push)
|
Revert "Switch to gevent queues for EventStream"
|
Revert "Switch to gevent queues for EventStream"
This reverts commit 4c102945fe46bb463e6a641324d26384c77fbae8.
|
Python
|
apache-2.0
|
bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes
|
---
+++
@@ -2,7 +2,7 @@
import gevent
-from gevent.queue import Queue
+from collections import deque
from changes.config import pubsub
@@ -10,7 +10,7 @@
class EventStream(object):
def __init__(self, channels, pubsub=pubsub):
self.pubsub = pubsub
- self.pending = Queue()
+ self.pending = deque()
self.channels = channels
self.active = True
@@ -22,18 +22,20 @@
# TODO(dcramer): figure out why we have to send this to ensure
# the connection is opened
yield "\n"
- event = self.pending.get()
- yield "event: {}\n".format(event['event'])
- for line in event['data'].splitlines():
- yield "data: {}\n".format(line)
- yield "\n"
- gevent.sleep(0)
+ while self.pending:
+ event = self.pending.pop()
+ yield "event: {}\n".format(event['event'])
+ for line in event['data'].splitlines():
+ yield "data: {}\n".format(line)
+ yield "\n"
+ gevent.sleep(0)
+ gevent.sleep(0.3)
def __del__(self):
self.close()
def push(self, message):
- self.pending.put_nowait(message)
+ self.pending.append(message)
def close(self):
for channel in self.channels:
|
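The generator in EventStream.__iter__ emits Server-Sent Events framing: an event: line, one data: line per payload line, then a blank line to close the message. A tiny helper reproducing the same framing in isolation (the helper name and sample payload are illustrative only, not part of the project):
def format_sse(event):
    # Mirrors the framing used in EventStream.__iter__.
    lines = ["event: {}".format(event['event'])]
    lines += ["data: {}".format(line) for line in event['data'].splitlines()]
    return "\n".join(lines) + "\n\n"

message = format_sse({'event': 'build.update', 'data': 'line one\nline two'})
assert message == "event: build.update\ndata: line one\ndata: line two\n\n"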
24a90b0aa38a21c9b116d4b8b9c4878678fda9cc
|
suddendev/tasks.py
|
suddendev/tasks.py
|
from . import celery, celery_socketio
from .game_instance import GameInstance
import time
@celery.task(time_limit=5, max_retries=3)
def play_game(game_id, player_names, scripts, player_ids, colors, namespace, room, wave=1):
cleared = True
current_wave = wave - 1
while cleared:
current_wave += 1
game = GameInstance(game_id, player_names, scripts, player_ids, colors, wave=current_wave)
for batch in game.run():
celery_socketio.emit('result', '{\"result\": [ ' + ','.join(batch) + ']}', room=room, namespace=namespace)
cleared = game.was_cleared()
# return highest reached wave
return current_wave - 1
@celery.task(time_limit=5, max_retries=3)
def test_round(game_id, player_names, scripts, player_ids, colors, namespace, room, wave=1):
game = GameInstance(game_id, player_names, scripts, player_ids, colors, wave=wave)
for batch in game.run():
celery_socketio.emit('result', '{\"result\": [ ' + ','.join(batch) + ']}', room=room, namespace=namespace)
# return if the test round was cleared
return game.was_cleared()
|
from . import celery, celery_socketio
from .game_instance import GameInstance
import time
@celery.task(time_limit=15, max_retries=3)
def play_game(game_id, player_names, scripts, player_ids, colors, namespace, room, wave=1):
cleared = True
current_wave = wave - 1
while cleared:
current_wave += 1
game = GameInstance(game_id, player_names, scripts, player_ids, colors, wave=current_wave)
for batch in game.run():
celery_socketio.emit('result', '{\"result\": [ ' + ','.join(batch) + ']}', room=room, namespace=namespace)
cleared = game.was_cleared()
# return highest reached wave
return current_wave - 1
@celery.task(time_limit=5, max_retries=3)
def test_round(game_id, player_names, scripts, player_ids, colors, namespace, room, wave=1):
game = GameInstance(game_id, player_names, scripts, player_ids, colors, wave=wave)
for batch in game.run():
celery_socketio.emit('result', '{\"result\": [ ' + ','.join(batch) + ']}', room=room, namespace=namespace)
# return if the test round was cleared
return game.was_cleared()
|
Bump up max game time to 15.
|
[NG] Bump up max game time to 15.
|
Python
|
mit
|
SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev
|
---
+++
@@ -2,7 +2,7 @@
from .game_instance import GameInstance
import time
-@celery.task(time_limit=5, max_retries=3)
+@celery.task(time_limit=15, max_retries=3)
def play_game(game_id, player_names, scripts, player_ids, colors, namespace, room, wave=1):
cleared = True
|
1b0428aaf77f1c6eadfb6b20611a2e2e6f30fbce
|
poller.py
|
poller.py
|
#!/usr/bin/env python
import urllib2
import ssl
def poll(sites, timeout):
for site in sites:
print 'polling ' + site
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
print e.code
except ssl.SSLError as e:
print e.message
else:
print 'ok'
if __name__ == '__main__':
poll(sites=(
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
), timeout=5)
|
#!/usr/bin/env python
import urllib2
import ssl
try:
import gntp.notifier as notify
except ImportError:
notify = None
def poll(sites, timeout, ok, error):
for site in sites:
ok('polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
error(site + ' ' + e.code)
except ssl.SSLError as e:
error(site + ' ' + e.message)
else:
ok('ok')
def empty(data):
pass
def output(data):
if notify:
notify.mini(data)
else:
print data
if __name__ == '__main__':
poll(sites=(
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
), timeout=5, ok=empty, error=output)
|
Add initial support for growl
|
Add initial support for growl
If growl lib isn't available, prints to console instead.
|
Python
|
mit
|
koodilehto/website-poller,koodilehto/website-poller
|
---
+++
@@ -2,24 +2,40 @@
import urllib2
import ssl
+try:
+ import gntp.notifier as notify
+except ImportError:
+ notify = None
-def poll(sites, timeout):
+def poll(sites, timeout, ok, error):
for site in sites:
- print 'polling ' + site
+ ok('polling ' + site)
try:
response = urllib2.urlopen(site, timeout=timeout)
response.read()
except urllib2.URLError as e:
- print e.code
+ error(site + ' ' + e.code)
except ssl.SSLError as e:
- print e.message
+ error(site + ' ' + e.message)
else:
- print 'ok'
+ ok('ok')
+
+
+def empty(data):
+ pass
+
+
+def output(data):
+ if notify:
+ notify.mini(data)
+ else:
+ print data
+
if __name__ == '__main__':
poll(sites=(
'https://redmine.codegrove.org',
'http://koodilehto.fi',
'http://vakiopaine.net',
- ), timeout=5)
+ ), timeout=5, ok=empty, error=output)
|