commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
e721f5fc7481e7970ba5e281e37b426123b415c3 | ruledxml/tests/test_order.py | ruledxml/tests/test_order.py | #!/usr/bin/env python3
import io
import unittest
import ruledxml
from . import utils
class TestRuledXmlForeach(unittest.TestCase):
def test_030(self):
result = io.BytesIO()
with open(utils.data('030_source.xml')) as src:
ruledxml.run(src, utils.data('030_rules.py'), result)
with open(utils.data('030_target.xml'), 'rb') as target:
utils.xmlEquals(self, result.getvalue(), target.read())
def test_031(self):
result = io.BytesIO()
with open(utils.data('031_source.xml')) as src:
ruledxml.run(src, utils.data('031_rules.py'), result)
with open(utils.data('031_target.xml'), 'rb') as target:
utils.xmlEquals(self, result.getvalue(), target.read())
def run():
unittest.main()
if __name__ == '__main__':
run()
| #!/usr/bin/env python3
import io
import unittest
import ruledxml
from . import utils
class TestRuledXmlOrder(unittest.TestCase):
def test_030(self):
result = io.BytesIO()
with open(utils.data('030_source.xml')) as src:
ruledxml.run(src, utils.data('030_rules.py'), result)
with open(utils.data('030_target.xml'), 'rb') as target:
utils.xmlEquals(self, result.getvalue(), target.read())
def test_031(self):
result = io.BytesIO()
with open(utils.data('031_source.xml')) as src:
ruledxml.run(src, utils.data('031_rules.py'), result)
with open(utils.data('031_target.xml'), 'rb') as target:
utils.xmlEquals(self, result.getvalue(), target.read())
def run():
unittest.main()
if __name__ == '__main__':
run()
| Fix name: should be TestRuledXmlOrder, not TestRuledXmlForeach. | Fix name: should be TestRuledXmlOrder, not TestRuledXmlForeach.
| Python | bsd-3-clause | meisterluk/ruledxml | ---
+++
@@ -8,7 +8,7 @@
from . import utils
-class TestRuledXmlForeach(unittest.TestCase):
+class TestRuledXmlOrder(unittest.TestCase):
def test_030(self):
result = io.BytesIO()
with open(utils.data('030_source.xml')) as src: |
5a8f107f987198740a0f0b9f1ee1f79d90662109 | txircd/modules/rfc/cmode_n.py | txircd/modules/rfc/cmode_n.py | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class NoExtMsgMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "NoExtMsgMode"
core = True
affectedActions = [ "commandmodify-PRIVMSG", "commandmodify-NOTICE" ]
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
return [ ("n", ModeType.NoParam, self) ]
def actions(self):
return [ ("modeactioncheck-channel-n-commandpermission-PRIVMSG", 1, self.channelHasMode),
("modeactioncheck-channel-n-commandpermission-NOTICE", 1, self.channelHasMode) ]
def apply(self, actionType, channel, param, user, command, data):
if user not in channel.users and channel in data["targetchans"]:
del data["targetchans"][channel]
user.sendMessage(irc.ERR_CANNOTSENDTOCHAN, channel.name, ":Cannot send to channel (no external messages)")
def channelHasMode(self, channel, user, command, data):
if "n" in channel.modes:
return ""
return None
noExtMsgMode = NoExtMsgMode() | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class NoExtMsgMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "NoExtMsgMode"
core = True
affectedActions = [ "commandmodify-PRIVMSG", "commandmodify-NOTICE" ]
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
return [ ("n", ModeType.NoParam, self) ]
def actions(self):
return [ ("modeactioncheck-channel-n-commandmodify-PRIVMSG", 1, self.channelHasMode),
("modeactioncheck-channel-n-commandmodify-NOTICE", 1, self.channelHasMode) ]
def apply(self, actionType, channel, param, user, command, data):
if user not in channel.users and channel in data["targetchans"]:
del data["targetchans"][channel]
user.sendMessage(irc.ERR_CANNOTSENDTOCHAN, channel.name, ":Cannot send to channel (no external messages)")
def channelHasMode(self, channel, user, command, data):
if "n" in channel.modes:
return ""
return None
noExtMsgMode = NoExtMsgMode() | Fix mode +n specification so that it actually fires ever | Fix mode +n specification so that it actually fires ever
| Python | bsd-3-clause | Heufneutje/txircd,ElementalAlchemist/txircd | ---
+++
@@ -18,8 +18,8 @@
return [ ("n", ModeType.NoParam, self) ]
def actions(self):
- return [ ("modeactioncheck-channel-n-commandpermission-PRIVMSG", 1, self.channelHasMode),
- ("modeactioncheck-channel-n-commandpermission-NOTICE", 1, self.channelHasMode) ]
+ return [ ("modeactioncheck-channel-n-commandmodify-PRIVMSG", 1, self.channelHasMode),
+ ("modeactioncheck-channel-n-commandmodify-NOTICE", 1, self.channelHasMode) ]
def apply(self, actionType, channel, param, user, command, data):
if user not in channel.users and channel in data["targetchans"]: |
3ff9f60e857c9ffbd7c72c53403ae7bf3afecab8 | test/features/steps/system.py | test/features/steps/system.py | from __future__ import print_function
import sys
import subprocess
import os
@given('a system executable {exe}')
def step_impl(context, exe):
binary = None
if sys.platform.startswith('win'):
try:
binary = subprocess.check_output(["where", exe]).decode('utf8').strip()
except:
pass
else:
try:
binary = subprocess.check_output(["which", exe]).decode('utf8').strip()
except:
pass
if binary is None:
print(
"Skipping scenario", context.scenario,
"(executable %s not found)" % exe,
file = sys.stderr
)
context.scenario.skip("The executable '%s' is not present" % exe)
else:
print(
"Found executable '%s' at '%s'" % (exe, binary),
file = sys.stderr
)
@then('{exe} is a static executable')
def step_impl(ctx, exe):
if sys.platform.lower().startswith('darwin'):
context.scenario.skip("Static runtime linking is not supported on OS X")
if sys.platform.startswith('win'):
lines = subprocess.check_output(["dumpbin.exe", "/DEPENDENTS", exe]).decode('utf8').split('\r\n')
for line in lines:
if 'msvcrt' in line.lower():
assert False, 'Found MSVCRT: %s' % line
else:
out = subprocess.check_output(["file", exe]).decode('utf8')
assert 'statically linked' in out, "Not a static executable: %s" % out
| from __future__ import print_function
import sys
import subprocess
import os
@given('a system executable {exe}')
def step_impl(context, exe):
binary = None
if sys.platform.startswith('win'):
try:
binary = subprocess.check_output(["where", exe]).decode('utf8').strip()
except:
pass
else:
try:
binary = subprocess.check_output(["which", exe]).decode('utf8').strip()
except:
pass
if binary is None:
print(
"Skipping scenario", context.scenario,
"(executable %s not found)" % exe,
file = sys.stderr
)
context.scenario.skip("The executable '%s' is not present" % exe)
else:
print(
"Found executable '%s' at '%s'" % (exe, binary),
file = sys.stderr
)
@then('{exe} is a static executable')
def step_impl(ctx, exe):
if sys.platform.lower().startswith('darwin'):
ctx.scenario.skip("Static runtime linking is not supported on OS X")
return
if sys.platform.startswith('win'):
lines = subprocess.check_output(["dumpbin.exe", "/DEPENDENTS", exe]).decode('utf8').split('\r\n')
for line in lines:
if 'msvcrt' in line.lower():
assert False, 'Found MSVCRT: %s' % line
else:
out = subprocess.check_output(["file", exe]).decode('utf8')
assert 'statically linked' in out, "Not a static executable: %s" % out
| Fix OS X test skip. | tests.features: Fix OS X test skip.
| Python | bsd-3-clause | hotgloupi/configure,hotgloupi/configure,hotgloupi/configure,hotgloupi/configure,hotgloupi/configure | ---
+++
@@ -33,7 +33,8 @@
@then('{exe} is a static executable')
def step_impl(ctx, exe):
if sys.platform.lower().startswith('darwin'):
- context.scenario.skip("Static runtime linking is not supported on OS X")
+ ctx.scenario.skip("Static runtime linking is not supported on OS X")
+ return
if sys.platform.startswith('win'):
lines = subprocess.check_output(["dumpbin.exe", "/DEPENDENTS", exe]).decode('utf8').split('\r\n') |
1dfe45d9ce6c81e5ae2396f97cc979192251c906 | selectable/apps.py | selectable/apps.py | try:
from django.apps import AppConfig
except ImportError:
AppConfig = object
class SelectableConfig(AppConfig):
"""App configuration for django-selectable."""
name = 'selectable'
def ready(self):
self.module.registry.autodiscover()
| try:
from django.apps import AppConfig
except ImportError:
AppConfig = object
class SelectableConfig(AppConfig):
"""App configuration for django-selectable."""
name = 'selectable'
def ready(self):
from . import registry
registry.autodiscover()
| Update auto-registration to work while running the tests. | Update auto-registration to work while running the tests.
| Python | bsd-2-clause | affan2/django-selectable,affan2/django-selectable,mlavin/django-selectable,affan2/django-selectable,mlavin/django-selectable,mlavin/django-selectable | ---
+++
@@ -10,4 +10,5 @@
name = 'selectable'
def ready(self):
- self.module.registry.autodiscover()
+ from . import registry
+ registry.autodiscover() |
96ace17d9cd800a5649ad32a8cb496a55d73ca9f | wapps/templatetags/wagtail.py | wapps/templatetags/wagtail.py | import jinja2
from django.conf import settings
from django_jinja import library
from jinja2.ext import Extension
from wagtail.wagtailcore.models import Page
from wagtail.contrib.wagtailroutablepage.templatetags.wagtailroutablepage_tags import (
routablepageurl as dj_routablepageurl
)
from wapps.utils import get_image_url
@library.global_function
def menu():
return Page.objects.live().in_menu().filter(depth__lte=3)
@library.global_function
@jinja2.contextfunction
def is_site_root(context, page):
if 'request' not in context or not context['request'].site:
return False
site = context['request'].site
return site.root_page.pk == page.pk
@library.global_function
def image_url(image, specs):
return get_image_url(image, specs)
@library.global_function
@jinja2.contextfunction
def routablepageurl(context, page, name, *args, **kwargs):
return dj_routablepageurl(context, page, name, *args, **kwargs)
@library.extension
class WagtailSettings(Extension):
def __init__(self, environment):
super(WagtailSettings, self).__init__(environment)
environment.globals['WAGTAIL_SITE_NAME'] = getattr(settings, 'WAGTAIL_SITE_NAME', None)
| import jinja2
from django.conf import settings
from django_jinja import library
from jinja2.ext import Extension
from wagtail.wagtailcore.models import Page
from wagtail.contrib.wagtailroutablepage.templatetags.wagtailroutablepage_tags import (
routablepageurl as dj_routablepageurl
)
from wapps.utils import get_image_url
@library.global_function
def menu():
return Page.objects.live().in_menu().filter(depth__lte=3)
@library.global_function
@jinja2.contextfunction
def is_site_root(context, page):
if 'request' not in context or not context['request'].site or not page:
return False
site = context['request'].site
return site.root_page.pk == page.pk
@library.global_function
def image_url(image, specs):
return get_image_url(image, specs)
@library.global_function
@jinja2.contextfunction
def routablepageurl(context, page, name, *args, **kwargs):
return dj_routablepageurl(context, page, name, *args, **kwargs)
@library.extension
class WagtailSettings(Extension):
def __init__(self, environment):
super(WagtailSettings, self).__init__(environment)
environment.globals['WAGTAIL_SITE_NAME'] = getattr(settings, 'WAGTAIL_SITE_NAME', None)
| Fix is_site_root when no page | Fix is_site_root when no page
| Python | mit | apihackers/wapps,apihackers/wapps,apihackers/wapps,apihackers/wapps | ---
+++
@@ -19,7 +19,7 @@
@library.global_function
@jinja2.contextfunction
def is_site_root(context, page):
- if 'request' not in context or not context['request'].site:
+ if 'request' not in context or not context['request'].site or not page:
return False
site = context['request'].site
return site.root_page.pk == page.pk |
020ffbe8436da2f7ee654fa6a12d50f9915db17f | examples/collection/views.py | examples/collection/views.py | from cruditor.contrib.collection import CollectionViewMixin
from cruditor.views import CruditorAddView, CruditorChangeView, CruditorDeleteView, CruditorListView
from django.urls import reverse, reverse_lazy
from examples.mixins import ExamplesMixin
from store.models import Person
from .filters import PersonFilter
from .forms import PersonForm
from .tables import PersonTable
class PersonViewMixin(ExamplesMixin, CollectionViewMixin):
model = Person
collection_list_title = 'Persons'
collection_list_urlname = 'collection:list'
collection_detail_urlname = 'collection:change'
class PersonListView(PersonViewMixin, CruditorListView):
title = 'Persons'
table_class = PersonTable
def get_titlebuttons(self):
return [{'url': reverse('collection:add'), 'label': 'Add person'}]
class PersonFilterView(PersonListView):
filter_class = PersonFilter
class PersonAddView(PersonViewMixin, CruditorAddView):
success_url = reverse_lazy('collection:lits')
form_class = PersonForm
class PersonChangeView(PersonViewMixin, CruditorChangeView):
form_class = PersonForm
def get_delete_url(self):
return reverse('collection:delete', args=(self.object.pk,))
class PersonDeleteView(PersonViewMixin, CruditorDeleteView):
pass
| from cruditor.contrib.collection import CollectionViewMixin
from cruditor.views import CruditorAddView, CruditorChangeView, CruditorDeleteView, CruditorListView
from django.urls import reverse, reverse_lazy
from examples.mixins import ExamplesMixin
from store.models import Person
from .filters import PersonFilter
from .forms import PersonForm
from .tables import PersonTable
class PersonViewMixin(ExamplesMixin, CollectionViewMixin):
model = Person
collection_list_title = 'Persons'
collection_list_urlname = 'collection:list'
collection_detail_urlname = 'collection:change'
class PersonListView(PersonViewMixin, CruditorListView):
title = 'Persons'
def get_titlebuttons(self):
return [{'url': reverse('collection:add'), 'label': 'Add person'}]
class PersonFilterView(PersonListView):
filter_class = PersonFilter
table_class = PersonTable
class PersonAddView(PersonViewMixin, CruditorAddView):
success_url = reverse_lazy('collection:lits')
form_class = PersonForm
class PersonChangeView(PersonViewMixin, CruditorChangeView):
form_class = PersonForm
def get_delete_url(self):
return reverse('collection:delete', args=(self.object.pk,))
class PersonDeleteView(PersonViewMixin, CruditorDeleteView):
pass
| Make use of auto generated table classes. | Make use of auto generated table classes.
| Python | mit | moccu/django-cruditor,moccu/django-cruditor,moccu/django-cruditor | ---
+++
@@ -19,7 +19,6 @@
class PersonListView(PersonViewMixin, CruditorListView):
title = 'Persons'
- table_class = PersonTable
def get_titlebuttons(self):
return [{'url': reverse('collection:add'), 'label': 'Add person'}]
@@ -27,6 +26,7 @@
class PersonFilterView(PersonListView):
filter_class = PersonFilter
+ table_class = PersonTable
class PersonAddView(PersonViewMixin, CruditorAddView): |
4ce7f8ce338c84b44e7ad16475ff68bc0fad970e | dddp/accounts/tests.py | dddp/accounts/tests.py | """Django DDP Accounts test suite."""
from __future__ import unicode_literals
import sys
from dddp import tests
class AccountsTestCase(tests.DDPServerTestCase):
# gevent-websocket doesn't work with Python 3 yet
@tests.expected_failure_if(sys.version_info.major == 3)
def test_login_no_accounts(self):
sockjs = self.server.sockjs('/sockjs/1/a/websocket')
resp = sockjs.websocket.recv()
self.assertEqual(resp, 'o')
msgs = sockjs.recv()
self.assertEqual(
msgs, [
{'server_id': '0'},
],
)
sockjs.connect('1', 'pre2', 'pre1')
msgs = sockjs.recv()
self.assertEqual(
msgs, [
{'msg': 'connected', 'session': msgs[0]['session']},
],
)
id_ = sockjs.call(
'login', {'user': 'invalid@example.com', 'password': 'foo'},
)
msgs = sockjs.recv()
self.assertEqual(
msgs, [
{
'msg': 'result', 'id': id_,
'error': {
'error': 403, 'reason': 'Authentication failed.',
},
},
],
)
sockjs.close()
| """Django DDP Accounts test suite."""
from __future__ import unicode_literals
import sys
from dddp import tests
# gevent-websocket doesn't work with Python 3 yet
@tests.expected_failure_if(sys.version_info.major == 3)
class AccountsTestCase(tests.DDPServerTestCase):
def test_login_no_accounts(self):
sockjs = self.server.sockjs('/sockjs/1/a/websocket')
resp = sockjs.websocket.recv()
self.assertEqual(resp, 'o')
msgs = sockjs.recv()
self.assertEqual(
msgs, [
{'server_id': '0'},
],
)
sockjs.connect('1', 'pre2', 'pre1')
msgs = sockjs.recv()
self.assertEqual(
msgs, [
{'msg': 'connected', 'session': msgs[0]['session']},
],
)
id_ = sockjs.call(
'login', {'user': 'invalid@example.com', 'password': 'foo'},
)
msgs = sockjs.recv()
self.assertEqual(
msgs, [
{
'msg': 'result', 'id': id_,
'error': {
'error': 403, 'reason': 'Authentication failed.',
},
},
],
)
sockjs.close()
| Move expected test failure to TestCase class. | Move expected test failure to TestCase class.
| Python | mit | commoncode/django-ddp,django-ddp/django-ddp,commoncode/django-ddp,django-ddp/django-ddp,django-ddp/django-ddp,commoncode/django-ddp,django-ddp/django-ddp,commoncode/django-ddp | ---
+++
@@ -5,10 +5,10 @@
from dddp import tests
+# gevent-websocket doesn't work with Python 3 yet
+@tests.expected_failure_if(sys.version_info.major == 3)
class AccountsTestCase(tests.DDPServerTestCase):
- # gevent-websocket doesn't work with Python 3 yet
- @tests.expected_failure_if(sys.version_info.major == 3)
def test_login_no_accounts(self):
sockjs = self.server.sockjs('/sockjs/1/a/websocket')
|
02f7a546cda7b8b3ce31616a74f3aa3518632885 | djangocms_spa_vue_js/templatetags/router_tags.py | djangocms_spa_vue_js/templatetags/router_tags.py | import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if context.has_key('vue_js_router'):
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
escaped_router_json = router_json.replace("'", "'") # Escape apostrophes to prevent JS errors.
return mark_safe(escaped_router_json)
| import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
escaped_router_json = router_json.replace("'", "'") # Escape apostrophes to prevent JS errors.
return mark_safe(escaped_router_json)
| Use `in` rather than `has_key` | Use `in` rather than `has_key`
| Python | mit | dreipol/djangocms-spa-vue-js | ---
+++
@@ -10,7 +10,7 @@
@register.simple_tag(takes_context=True)
def vue_js_router(context):
- if context.has_key('vue_js_router'):
+ if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context) |
15bbe3aaaa017513ac652bf246b906139a71be00 | doc/tutorials/examples/general/client/headers.py | doc/tutorials/examples/general/client/headers.py | import base64
from pyamf.remoting.client import RemotingService
gw = RemotingService('http://demo.pyamf.org/gateway/recordset')
gw.addHTTPHeader('Set-Cookie', 'sessionid=QT3cUmACNeKQo5oPeM0')
gw.removeHTTPHeader('Set-Cookie')
username = 'admin'
password = 'admin'
auth = base64.encodestring('%s:%s' % (username, password))[:-1]
gw.addHTTPHeader("Authorization", "Basic %s" % auth) | from pyamf.remoting.client import RemotingService
gw = RemotingService('http://demo.pyamf.org/gateway/recordset')
gw.addHTTPHeader('Set-Cookie', 'sessionid=QT3cUmACNeKQo5oPeM0')
gw.removeHTTPHeader('Set-Cookie')
username = 'admin'
password = 'admin'
auth = ('%s:%s' % (username, password)).encode('base64')[:-1]
gw.addHTTPHeader("Authorization", "Basic %s" % auth)
service = gw.getService('service')
print service.getLanguages()
| Apply client authorization fix from wiki | Apply client authorization fix from wiki
git-svn-id: f3978d5834b2aa37aa734927aace4f0b92cf88c5@2985 2dde4cc4-cf3c-0410-b1a3-a9b8ff274da5
| Python | mit | cardmagic/PyAMF,cardmagic/PyAMF,cardmagic/PyAMF | ---
+++
@@ -1,5 +1,3 @@
-import base64
-
from pyamf.remoting.client import RemotingService
gw = RemotingService('http://demo.pyamf.org/gateway/recordset')
@@ -9,6 +7,9 @@
username = 'admin'
password = 'admin'
-auth = base64.encodestring('%s:%s' % (username, password))[:-1]
+auth = ('%s:%s' % (username, password)).encode('base64')[:-1]
gw.addHTTPHeader("Authorization", "Basic %s" % auth)
+
+service = gw.getService('service')
+print service.getLanguages() |
a3c3a6ed4d01f1857fc4728b10505e330af9e6ae | code/helper/easierlife.py | code/helper/easierlife.py | #! /usr/bin/env python3
""" Helper functions to make our life easier.
Originally obtained from the 'pharm' repository, but modified.
"""
import fileinput
import json
import os.path
from dstruct import Sentence
## BASE_DIR denotes the application directory
BASE_DIR, throwaway = os.path.split(os.path.realpath(__file__))
BASE_DIR = os.path.realpath(BASE_DIR + "/../..")
## Return the start and end indexes of all subsets of words in the sentence
## sent, with size at most max_phrase_length
def get_all_phrases_in_sentence(sent, max_phrase_length):
for start in range(len(sent.words)):
for end in reversed(range(start + 1, min(len(sent.words), start + 1 + max_phrase_length))):
yield (start, end)
## Return Sentence objects from input lines
def get_input_sentences(input_files=[]):
for line in fileinput.input(input_files):
sent_dict = json.loads(line)
yield Sentence(sent_dict["doc_id"], sent_dict["sent_id"],
sent_dict["wordidxs"], sent_dict["words"],
sent_dict["poses"], sent_dict["ners"], sent_dict["lemmas"],
sent_dict["dep_paths"], sent_dict["dep_parents"],
sent_dict["bounding_boxes"])
| #! /usr/bin/env python3
""" Helper functions to make our life easier.
Originally obtained from the 'pharm' repository, but modified.
"""
import fileinput
import json
import os.path
import sys
from dstruct.Sentence import Sentence
## BASE_DIR denotes the application directory
BASE_DIR, throwaway = os.path.split(os.path.realpath(__file__))
BASE_DIR = os.path.realpath(BASE_DIR + "/../..")
## Return the start and end indexes of all subsets of words in the sentence
## sent, with size at most max_phrase_length
def get_all_phrases_in_sentence(sent, max_phrase_length):
for start in range(len(sent.words)):
for end in reversed(range(start + 1, min(len(sent.words), start + 1 + max_phrase_length))):
yield (start, end)
## Return Sentence objects from input lines
def get_input_sentences(input_files=sys.argv[1:]):
with fileinput.input(files=input_files) as f:
for line in f:
sent_dict = json.loads(line)
yield Sentence(sent_dict["doc_id"], sent_dict["sent_id"],
sent_dict["wordidxs"], sent_dict["words"],
sent_dict["poses"], sent_dict["ners"], sent_dict["lemmas"],
sent_dict["dep_paths"], sent_dict["dep_parents"],
sent_dict["bounding_boxes"])
| Fix import, use fileinput.iput as context, and fix its argument | Fix import, use fileinput.iput as context, and fix its argument
| Python | apache-2.0 | amwenger/dd-genomics,rionda/dd-genomics,HazyResearch/dd-genomics,amwenger/dd-genomics,HazyResearch/dd-genomics,HazyResearch/dd-genomics,HazyResearch/dd-genomics,rionda/dd-genomics,amwenger/dd-genomics,HazyResearch/dd-genomics | ---
+++
@@ -7,8 +7,9 @@
import fileinput
import json
import os.path
+import sys
-from dstruct import Sentence
+from dstruct.Sentence import Sentence
## BASE_DIR denotes the application directory
BASE_DIR, throwaway = os.path.split(os.path.realpath(__file__))
@@ -23,12 +24,13 @@
yield (start, end)
## Return Sentence objects from input lines
-def get_input_sentences(input_files=[]):
- for line in fileinput.input(input_files):
- sent_dict = json.loads(line)
- yield Sentence(sent_dict["doc_id"], sent_dict["sent_id"],
- sent_dict["wordidxs"], sent_dict["words"],
- sent_dict["poses"], sent_dict["ners"], sent_dict["lemmas"],
- sent_dict["dep_paths"], sent_dict["dep_parents"],
- sent_dict["bounding_boxes"])
+def get_input_sentences(input_files=sys.argv[1:]):
+ with fileinput.input(files=input_files) as f:
+ for line in f:
+ sent_dict = json.loads(line)
+ yield Sentence(sent_dict["doc_id"], sent_dict["sent_id"],
+ sent_dict["wordidxs"], sent_dict["words"],
+ sent_dict["poses"], sent_dict["ners"], sent_dict["lemmas"],
+ sent_dict["dep_paths"], sent_dict["dep_parents"],
+ sent_dict["bounding_boxes"])
|
91aa7ed06d168700692a33fd3c51add585d60ac0 | backend/uclapi/roombookings/migrations/0007_auto_20170327_1323.py | backend/uclapi/roombookings/migrations/0007_auto_20170327_1323.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-27 13:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('roombookings', '0006_bookinga_bookingb_lock'),
]
operations = [
migrations.AddField(
model_name='bookinga',
name='id',
field=models.AutoField(primary_key=True, serialize=False),
preserve_default=False,
),
migrations.AddField(
model_name='bookingb',
name='id',
field=models.AutoField(primary_key=True, serialize=False),
preserve_default=False,
),
migrations.AlterField(
model_name='bookinga',
name='slotid',
field=models.BigIntegerField(),
),
migrations.AlterField(
model_name='bookingb',
name='slotid',
field=models.BigIntegerField(),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-27 13:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('roombookings', '0006_bookinga_bookingb_lock'),
]
operations = [
migrations.AlterField(
model_name='bookinga',
name='slotid',
field=models.BigIntegerField(null=True, primary_key=False),
),
migrations.AlterField(
model_name='bookingb',
name='slotid',
field=models.BigIntegerField(null=True, primary_key=False),
),
migrations.AddField(
model_name='bookinga',
name='id',
field=models.AutoField(primary_key=True, serialize=False),
preserve_default=False,
),
migrations.AddField(
model_name='bookingb',
name='id',
field=models.AutoField(primary_key=True, serialize=False),
preserve_default=False,
),
]
| Fix up migration to have only one PK | Fix up migration to have only one PK
| Python | mit | uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi | ---
+++
@@ -12,6 +12,16 @@
]
operations = [
+ migrations.AlterField(
+ model_name='bookinga',
+ name='slotid',
+ field=models.BigIntegerField(null=True, primary_key=False),
+ ),
+ migrations.AlterField(
+ model_name='bookingb',
+ name='slotid',
+ field=models.BigIntegerField(null=True, primary_key=False),
+ ),
migrations.AddField(
model_name='bookinga',
name='id',
@@ -24,14 +34,4 @@
field=models.AutoField(primary_key=True, serialize=False),
preserve_default=False,
),
- migrations.AlterField(
- model_name='bookinga',
- name='slotid',
- field=models.BigIntegerField(),
- ),
- migrations.AlterField(
- model_name='bookingb',
- name='slotid',
- field=models.BigIntegerField(),
- ),
] |
61e30e91ffc87a7a8f575d32fba43e61a65b477a | bot/storage/data_source/data_sources/sqlite/sqlite.py | bot/storage/data_source/data_sources/sqlite/sqlite.py | from sqlite_framework.log.logger import SqliteLogger
from sqlite_framework.session.session import SqliteSession
from bot.storage.data_source.data_source import StorageDataSource
class SqliteStorageDataSource(StorageDataSource):
def __init__(self, database_filename: str, debug: bool, logger: SqliteLogger):
super().__init__()
self.session = SqliteSession(database_filename, debug)
self.logger = logger
def init(self):
self.session.init()
def context_manager(self):
return self.session.context_manager()
| from sqlite_framework.log.logger import SqliteLogger
from sqlite_framework.session.session import SqliteSession
from bot.storage.data_source.data_source import StorageDataSource
class SqliteStorageDataSource(StorageDataSource):
def __init__(self, session: SqliteSession, logger: SqliteLogger):
super().__init__()
self.session = session
self.logger = logger
def init(self):
self.session.init()
def context_manager(self):
return self.session.context_manager()
| Update SqliteStorageDataSource to receive the SqliteSession already built, so that clients can have more control over its construction | Update SqliteStorageDataSource to receive the SqliteSession already built, so that clients can have more control over its construction
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot | ---
+++
@@ -5,9 +5,9 @@
class SqliteStorageDataSource(StorageDataSource):
- def __init__(self, database_filename: str, debug: bool, logger: SqliteLogger):
+ def __init__(self, session: SqliteSession, logger: SqliteLogger):
super().__init__()
- self.session = SqliteSession(database_filename, debug)
+ self.session = session
self.logger = logger
def init(self): |
9207009ae26324650f904b010a065d98c2f41300 | server/server/debug.py | server/server/debug.py | from django.http import HttpResponse
import json
class NonHtmlDebugToolbarMiddleware(object):
"""
The Django Debug Toolbar usually only works for views that return HTML.
This middleware wraps any non-HTML response in HTML if the request has a
'debug' query parameter (e.g. http://localhost/foo?debug) Special handling
for json (pretty printing) and binary data (only show data length).
Based on http://stackoverflow.com/a/19249559/10817
"""
@staticmethod
def process_response(request, response):
if request.GET.get('debug') == '':
if response['Content-Type'] == 'application/octet-stream':
new_content = '<html><body>Binary Data, ' \
'Length: {}</body></html>'.format(len(response.content))
response = HttpResponse(new_content)
elif response['Content-Type'] != 'text/html':
content = response.content
try:
json_ = json.loads(content)
content = json.dumps(json_, sort_keys=True, indent=2)
except ValueError:
pass
response = HttpResponse('<html><body><pre>{}'
'</pre></body></html>'.format(content))
return response
# Middleware classes for debug toolbar.
middleware = ('debug_toolbar.middleware.DebugToolbarMiddleware',
'server.debug.NonHtmlDebugToolbarMiddleware')
| from django.http import HttpResponse
import json
class NonHtmlDebugToolbarMiddleware(object):
"""
The Django Debug Toolbar usually only works for views that return HTML.
This middleware wraps any non-HTML response in HTML if the request has a
'debug' query parameter (e.g. http://localhost/foo?debug) Special handling
for json (pretty printing) and binary data (only show data length).
Based on http://stackoverflow.com/a/19249559/10817
"""
@staticmethod
def process_response(request, response):
if request.GET.get('debug') == '':
if response['Content-Type'] == 'application/octet-stream':
new_content = '<html><body>Binary Data, ' \
'Length: {}</body></html>'.format(len(response.content))
response = HttpResponse(new_content)
elif not response['Content-Type'].startswith('text/html'):
content = response.content
try:
json_ = json.loads(content)
content = json.dumps(json_, sort_keys=True, indent=2)
except ValueError:
pass
response = HttpResponse('<html><body><pre>{}'
'</pre></body></html>'.format(content))
return response
# Middleware classes for debug toolbar.
middleware = ('debug_toolbar.middleware.DebugToolbarMiddleware',
'server.debug.NonHtmlDebugToolbarMiddleware')
| Drop wrap content starting with text/html | Drop wrap content starting with text/html
New versions of Django give HTML responses with the content type
'text/html; charset: utf-8'. We don't want to wrap that, so only check
for a start of text/html.
| Python | apache-2.0 | auvsi-suas/interop,auvsi-suas/interop,auvsi-suas/interop,justineaster/interop,justineaster/interop,justineaster/interop,auvsi-suas/interop,justineaster/interop,justineaster/interop | ---
+++
@@ -19,7 +19,7 @@
new_content = '<html><body>Binary Data, ' \
'Length: {}</body></html>'.format(len(response.content))
response = HttpResponse(new_content)
- elif response['Content-Type'] != 'text/html':
+ elif not response['Content-Type'].startswith('text/html'):
content = response.content
try:
json_ = json.loads(content) |
023e814e6661c11bfe58a4e3e4ce4167ae63cd7f | rdio_dl/cli.py | rdio_dl/cli.py | import click
import youtube_dl
from .config import storage_load
from .extractor import RdioIE
@click.command()
@click.option(u'-u', u'--user', help=u'A Rdio user')
@click.option(u'-p', u'--password', help=u'The password')
@click.argument(u'urls', required=True, nargs=-1)
def main(user, password, urls):
storage = storage_load()
with youtube_dl.YoutubeDL() as ydl:
ydl.add_info_extractor(RdioIE(storage, user, password))
ydl.download(urls)
| # -*- coding: utf-8 -*-
import click
import youtube_dl
from .config import storage_load
from .extractor import RdioIE
def add_info_extractor_above_generic(ydl, ie):
generic = ydl._ies.pop()
ydl.add_info_extractor(ie)
ydl.add_info_extractor(generic)
@click.command()
@click.option(u'-u', u'--user', help=u'A Rdio user')
@click.option(u'-p', u'--password', help=u'The password')
@click.argument(u'urls', required=True, nargs=-1)
def main(user, password, urls):
storage = storage_load()
with youtube_dl.YoutubeDL() as ydl:
add_info_extractor_above_generic(ydl, RdioIE(storage, user, password))
ydl.download(urls)
| Fix generic extractor being always selected | Fix generic extractor being always selected
Turns out our extractor was being inserted *after* the GenericIE.
Now we are inserting our RdioIE right above GenericIE.
| Python | mit | ravishi/rdio-dl | ---
+++
@@ -1,8 +1,16 @@
+# -*- coding: utf-8 -*-
import click
import youtube_dl
from .config import storage_load
from .extractor import RdioIE
+
+
+def add_info_extractor_above_generic(ydl, ie):
+ generic = ydl._ies.pop()
+ ydl.add_info_extractor(ie)
+ ydl.add_info_extractor(generic)
+
@click.command()
@click.option(u'-u', u'--user', help=u'A Rdio user')
@@ -11,5 +19,5 @@
def main(user, password, urls):
storage = storage_load()
with youtube_dl.YoutubeDL() as ydl:
- ydl.add_info_extractor(RdioIE(storage, user, password))
+ add_info_extractor_above_generic(ydl, RdioIE(storage, user, password))
ydl.download(urls) |
fb3f1023faedda37e5ca16b87d2b9ddc38a2196c | deployer/tasks/util.py | deployer/tasks/util.py | from celery.result import ResultBase, AsyncResult, GroupResult
import deployer
from deployer.tasks.exceptions import TaskExecutionException
__author__ = 'sukrit'
def check_or_raise_task_exception(result):
if isinstance(result, AsyncResult) and result.failed():
if isinstance(result.result, TaskExecutionException):
raise result.result
else:
raise TaskExecutionException(result.result, result.traceback)
def _check_error(result):
if not result or not isinstance(result, AsyncResult):
return
check_or_raise_task_exception(result)
_check_error(result.parent)
def simple_result(result):
# DO not remove line below
# Explanation: https://github.com/celery/celery/issues/2315
deployer.celery.app.set_current()
if isinstance(result, GroupResult):
return simple_result(result.results)
elif hasattr(result, '__iter__') and not isinstance(result, dict):
return [simple_result(each_result)
for each_result in result]
elif isinstance(result, ResultBase):
_check_error(result)
if result.ready():
check_or_raise_task_exception(result)
return simple_result(result.result)
else:
raise TaskNotReadyException()
return result
class TaskNotReadyException(Exception):
pass
| import socket
from celery.result import ResultBase, AsyncResult, GroupResult
import deployer
from deployer.tasks.exceptions import TaskExecutionException
from deployer.util import retry
__author__ = 'sukrit'
def check_or_raise_task_exception(result):
if isinstance(result, AsyncResult) and result.failed():
if isinstance(result.result, TaskExecutionException):
raise result.result
else:
raise TaskExecutionException(result.result, result.traceback)
def _check_error(result):
if not result or not isinstance(result, AsyncResult):
return
check_or_raise_task_exception(result)
_check_error(result.parent)
@retry(10, delay=5, backoff=1, except_on=(IOError, socket.error))
def simple_result(result):
# DO not remove line below
# Explanation: https://github.com/celery/celery/issues/2315
deployer.celery.app.set_current()
if isinstance(result, GroupResult):
return simple_result(result.results)
elif hasattr(result, '__iter__') and not isinstance(result, dict):
return [simple_result(each_result)
for each_result in result]
elif isinstance(result, ResultBase):
_check_error(result)
if result.ready():
check_or_raise_task_exception(result)
return simple_result(result.result)
else:
raise TaskNotReadyException()
return result
class TaskNotReadyException(Exception):
pass
| Add retry for socket error | Add retry for socket error
| Python | mit | totem/cluster-deployer,totem/cluster-deployer,totem/cluster-deployer | ---
+++
@@ -1,6 +1,8 @@
+import socket
from celery.result import ResultBase, AsyncResult, GroupResult
import deployer
from deployer.tasks.exceptions import TaskExecutionException
+from deployer.util import retry
__author__ = 'sukrit'
@@ -20,6 +22,7 @@
_check_error(result.parent)
+@retry(10, delay=5, backoff=1, except_on=(IOError, socket.error))
def simple_result(result):
# DO not remove line below
# Explanation: https://github.com/celery/celery/issues/2315 |
d0ea27a56013af944ef9e7fef9ebe1c8f44e3aab | community_blog/__openerp__.py | community_blog/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2013 Yannick Buron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'OpenERP CommunityTools - Website Blog',
'version': '1.0',
'category': 'Community',
'depends': ['community',
'website_blog',
],
'author': 'Yannick Buron',
'license': 'AGPL-3',
'website': 'https://launchpad.net/openerp-communitytools',
'description': """
OpenERP for Communities - Blog
=================
""",
'data': ['security/community_blog_security.xml'],
'demo': [],
'installable': True,
'application': True,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2013 Yannick Buron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Odoo for Communities - Website Blog',
'version': '1.0',
'category': 'Community',
'depends': ['community',
'website_blog',
],
'author': 'Yannick Buron',
'license': 'AGPL-3',
'website': 'https://launchpad.net/openerp-communitytools',
'description': """
Odoo for Communities - Blog
=================
""",
'data': ['security/community_blog_security.xml'],
'demo': [],
'installable': True,
'application': True,
}
| Change name of module community_blog | Change name of module community_blog
Changement de nom | Python | agpl-3.0 | YannickB/vertical-community,Valeureux/wezer-exchange,Valeureux/wezer-exchange,codoo/vertical-community,open-synergy/vertical-community,Valeureux/wezer-exchange,Valeureux/wezer-exchange | ---
+++
@@ -19,7 +19,7 @@
#
##############################################################################
-{'name': 'OpenERP CommunityTools - Website Blog',
+{'name': 'Odoo for Communities - Website Blog',
'version': '1.0',
'category': 'Community',
'depends': ['community',
@@ -29,7 +29,7 @@
'license': 'AGPL-3',
'website': 'https://launchpad.net/openerp-communitytools',
'description': """
-OpenERP for Communities - Blog
+Odoo for Communities - Blog
=================
""", |
b7bad7823384ec5261271e3f54ed272775a7562f | sparqllib/formatter.py | sparqllib/formatter.py | import abc
import re
class Formatter:
@abc.abstractmethod
def format(self, query):
''' Should return a human-readable version of the query string
'''
pass
class BasicFormatter(Formatter):
''' Provides a basic default formatting for query strings
This formatter provides only indentation levels and newlines at
open braces.
'''
def __init__(self):
self.indent_str = " "
def format(self, query):
#TODO handle braces inside literals correctly
formatted_query = ""
indent_level = 0
for letter in query:
# newline and reindent on open brace
if letter == "{":
indent_level += 1
formatted_query += "{\n" + self.indent_str*indent_level
# newline and reindent on close brace
elif letter == "}":
indent_level -= 1
formatted_query += "\n" + self.indent_str*indent_level + "}"
# reindent after any newline
elif len(formatted_query) and formatted_query[-1] == '\n':
formatted_query += self.indent_str*indent_level + letter
# otherwise just add the letter
else:
formatted_query += letter
# remove duplicate newlines
formatted_query = re.sub(r'(\n+)', '\n', formatted_query, flags=re.MULTILINE)
return formatted_query
| import abc
import re
class Formatter:
@abc.abstractmethod
def format(self, query):
''' Should return a human-readable version of the query string
'''
pass
class BasicFormatter(Formatter):
''' Provides a basic default formatting for query strings
This formatter provides only indentation levels and newlines at
open braces.
'''
def __init__(self):
self.indent_str = " "
def format(self, query):
if not isinstance(query, str):
query = query.serialize()
#TODO handle braces inside literals correctly
formatted_query = ""
indent_level = 0
for letter in query:
# newline and reindent on open brace
if letter == "{":
indent_level += 1
formatted_query += "{\n" + self.indent_str*indent_level
# newline and reindent on close brace
elif letter == "}":
indent_level -= 1
formatted_query += "\n" + self.indent_str*indent_level + "}"
# reindent after any newline
elif len(formatted_query) and formatted_query[-1] == '\n':
formatted_query += self.indent_str*indent_level + letter
# otherwise just add the letter
else:
formatted_query += letter
# trim whitespace
formatted_query = re.sub(r'(.)\s+\n', '\g<1>\n', formatted_query, flags=re.MULTILINE)
# remove duplicate newlines
formatted_query = re.sub(r'(\n+)', '\n', formatted_query, flags=re.MULTILINE)
return formatted_query
| Trim trailing whitespace with BasicFormatter | Trim trailing whitespace with BasicFormatter
| Python | mit | ALSchwalm/sparqllib | ---
+++
@@ -18,6 +18,9 @@
self.indent_str = " "
def format(self, query):
+ if not isinstance(query, str):
+ query = query.serialize()
+
#TODO handle braces inside literals correctly
formatted_query = ""
indent_level = 0
@@ -42,6 +45,9 @@
else:
formatted_query += letter
+ # trim whitespace
+ formatted_query = re.sub(r'(.)\s+\n', '\g<1>\n', formatted_query, flags=re.MULTILINE)
+
# remove duplicate newlines
formatted_query = re.sub(r'(\n+)', '\n', formatted_query, flags=re.MULTILINE)
return formatted_query |
a7f467589c49020977328e45eed4eff5b607231f | checker/tests/downstream/test_check_files_menu_agreements.py | checker/tests/downstream/test_check_files_menu_agreements.py | import magic
import os.path as op
from checker.base import BakeryTestCase as TestCase, tags
from checker.metadata import Metadata
class CheckFontsMenuAgreements(TestCase):
path = '.'
name = __name__
targets = ['metadata']
tool = 'lint'
def menufile(self, font_metadata):
return '%s.menu' % font_metadata.post_script_name
@tags('required')
def test_menu_file_agreement(self):
""" Menu file have font-name-style.menu format """
contents = self.read_metadata_contents()
fm = Metadata.get_family_metadata(contents)
for font_metadata in fm.fonts:
menufile = self.menufile(font_metadata)
path = op.join(op.dirname(self.path), menufile)
if not op.exists(path):
self.fail('%s does not exist' % menufile)
if magic.from_file("%s.menu" % self.fname) != 'TrueType font data':
self.fail('%s is not actual TTF file' % menufile)
| import magic
import os.path as op
from checker.base import BakeryTestCase as TestCase, tags
from checker.metadata import Metadata
class CheckFontsMenuAgreements(TestCase):
path = '.'
name = __name__
targets = ['metadata']
tool = 'lint'
def read_metadata_contents(self):
return open(self.path).read()
def menufile(self, font_metadata):
return '%s.menu' % font_metadata.post_script_name
@tags('required')
def test_menu_file_agreement(self):
""" Menu file have font-name-style.menu format """
contents = self.read_metadata_contents()
fm = Metadata.get_family_metadata(contents)
for font_metadata in fm.fonts:
menufile = self.menufile(font_metadata)
path = op.join(op.dirname(self.path), menufile)
if not op.exists(path):
self.fail('%s does not exist' % menufile)
if magic.from_file("%s.menu" % self.fname) != 'TrueType font data':
self.fail('%s is not actual TTF file' % menufile)
| Fix check menu files agreement test | Fix check menu files agreement test
| Python | apache-2.0 | davelab6/fontbakery,googlefonts/fontbakery,moyogo/fontbakery,moyogo/fontbakery,graphicore/fontbakery,jessamynsmith/fontbakery,graphicore/fontbakery,googlefonts/fontbakery,graphicore/fontbakery,moyogo/fontbakery,googlefonts/fontbakery | ---
+++
@@ -11,6 +11,9 @@
name = __name__
targets = ['metadata']
tool = 'lint'
+
+ def read_metadata_contents(self):
+ return open(self.path).read()
def menufile(self, font_metadata):
return '%s.menu' % font_metadata.post_script_name |
4fc108a39476f92acf0d42b66466012cba868b1d | h2o-py/tests/testdir_algos/rf/pyunit_vi_toy_testRF.py | h2o-py/tests/testdir_algos/rf/pyunit_vi_toy_testRF.py | import sys
sys.path.insert(1, "../../../")
import h2o
def vi_toy_test(ip,port):
# Connect to h2o
h2o.init(ip,port)
toy_data = h2o.import_frame(path=h2o.locate("smalldata/gbm_test/toy_data_RF.csv"))
#toy_data.summary()
toy_data[6] = toy_data[6].asfactor()
toy_data.show()
rf = h2o.random_forest(x=toy_data[[0,1,2,3,4,5]], y=toy_data[6], ntrees=500, max_depth=20, nbins=100)
ranking = [rf._model_json['output']['variable_importances'].cell_values[v][0] for v in range(toy_data.ncol()-1)]
print(ranking)
assert tuple(ranking) == tuple(["V3","V2","V6","V1","V5","V4"]), "expected specific variable importance ranking"
if __name__ == "__main__":
h2o.run_test(sys.argv, vi_toy_test)
| import sys
sys.path.insert(1, "../../../")
import h2o
def vi_toy_test(ip,port):
# Connect to h2o
h2o.init(ip,port)
toy_data = h2o.import_frame(path=h2o.locate("smalldata/gbm_test/toy_data_RF.csv"))
#toy_data.summary()
toy_data[6] = toy_data[6].asfactor()
toy_data.show()
rf = h2o.random_forest(x=toy_data[[0,1,2,3,4,5]], y=toy_data[6], ntrees=500, max_depth=20, nbins=100, seed=0)
ranking = [rf._model_json['output']['variable_importances'].cell_values[v][0] for v in range(toy_data.ncol()-1)]
print(ranking)
assert tuple(ranking) == tuple(["V3","V2","V6","V1","V5","V4"]), "expected specific variable importance ranking"
if __name__ == "__main__":
h2o.run_test(sys.argv, vi_toy_test)
| Fix the seed for RF test. | Fix the seed for RF test.
| Python | apache-2.0 | ChristosChristofidis/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,bospetersen/h2o-3,spennihana/h2o-3,mathemage/h2o-3,weaver-viii/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,h2oai/h2o-3,mathemage/h2o-3,PawarPawan/h2o-v3,michalkurka/h2o-3,datachand/h2o-3,PawarPawan/h2o-v3,printedheart/h2o-3,kyoren/https-github.com-h2oai-h2o-3,mrgloom/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,printedheart/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,tarasane/h2o-3,pchmieli/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,tarasane/h2o-3,PawarPawan/h2o-v3,printedheart/h2o-3,jangorecki/h2o-3,mrgloom/h2o-3,h2oai/h2o-dev,kyoren/https-github.com-h2oai-h2o-3,nilbody/h2o-3,pchmieli/h2o-3,h2oai/h2o-3,PawarPawan/h2o-v3,kyoren/https-github.com-h2oai-h2o-3,mathemage/h2o-3,tarasane/h2o-3,ChristosChristofidis/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,nilbody/h2o-3,nilbody/h2o-3,weaver-viii/h2o-3,bospetersen/h2o-3,pchmieli/h2o-3,junwucs/h2o-3,YzPaul3/h2o-3,junwucs/h2o-3,spennihana/h2o-3,spennihana/h2o-3,spennihana/h2o-3,YzPaul3/h2o-3,weaver-viii/h2o-3,kyoren/https-github.com-h2oai-h2o-3,mathemage/h2o-3,tarasane/h2o-3,pchmieli/h2o-3,nilbody/h2o-3,brightchen/h2o-3,tarasane/h2o-3,mrgloom/h2o-3,mrgloom/h2o-3,jangorecki/h2o-3,datachand/h2o-3,nilbody/h2o-3,madmax983/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,ChristosChristofidis/h2o-3,ChristosChristofidis/h2o-3,h2oai/h2o-dev,bospetersen/h2o-3,ChristosChristofidis/h2o-3,brightchen/h2o-3,brightchen/h2o-3,madmax983/h2o-3,datachand/h2o-3,PawarPawan/h2o-v3,bospetersen/h2o-3,datachand/h2o-3,PawarPawan/h2o-v3,kyoren/https-github.com-h2oai-h2o-3,nilbody/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,YzPaul3/h2o-3,brightchen/h2o-3,printedheart/h2o-3,bospetersen/h2o-3,weaver-viii/h2o-3,junwucs/h2o-3,jangorecki/h2o-3,printedheart/h2o-3,mathemage/h2o-3,pchmieli/h2o-3,jangorecki/h2o-3,nilbody/h2o-3,kyoren/https-github.com-h2oai-h2o-3,weaver-viii/h2o-3,madmax983/h2o-3,mrgloom/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,brightchen/h2o-3,tarasane/h2o-3,jangorecki/h2o-3,pchmieli/h2o-3,h2oai/h2o-3,mathemage/h2o-3,printedheart/h2o-
3,datachand/h2o-3,PawarPawan/h2o-v3,pchmieli/h2o-3,junwucs/h2o-3,h2oai/h2o-dev,printedheart/h2o-3,madmax983/h2o-3,mrgloom/h2o-3,jangorecki/h2o-3,datachand/h2o-3,weaver-viii/h2o-3,junwucs/h2o-3,datachand/h2o-3,jangorecki/h2o-3,spennihana/h2o-3,ChristosChristofidis/h2o-3,junwucs/h2o-3,ChristosChristofidis/h2o-3,bospetersen/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,madmax983/h2o-3,junwucs/h2o-3,michalkurka/h2o-3,bospetersen/h2o-3,mrgloom/h2o-3,spennihana/h2o-3,brightchen/h2o-3,madmax983/h2o-3,madmax983/h2o-3,kyoren/https-github.com-h2oai-h2o-3,weaver-viii/h2o-3,tarasane/h2o-3,brightchen/h2o-3 | ---
+++
@@ -11,7 +11,7 @@
toy_data[6] = toy_data[6].asfactor()
toy_data.show()
- rf = h2o.random_forest(x=toy_data[[0,1,2,3,4,5]], y=toy_data[6], ntrees=500, max_depth=20, nbins=100)
+ rf = h2o.random_forest(x=toy_data[[0,1,2,3,4,5]], y=toy_data[6], ntrees=500, max_depth=20, nbins=100, seed=0)
ranking = [rf._model_json['output']['variable_importances'].cell_values[v][0] for v in range(toy_data.ncol()-1)]
print(ranking) |
ead5d7aa7a4a6fe4557c0e792ebc11e25359722f | rx/concurrency/scheduleditem.py | rx/concurrency/scheduleditem.py | from rx.disposables import SingleAssignmentDisposable
def default_sub_comparer(x, y):
return 0 if x == y else 1 if x > y else -1
class ScheduledItem(object):
def __init__(self, scheduler, state, action, duetime, comparer=None):
self.scheduler = scheduler
self.state = state
self.action = action
self.duetime = duetime
self.comparer = comparer or default_sub_comparer
self.disposable = SingleAssignmentDisposable()
def invoke(self):
self.disposable.disposable = self.invoke_core()
def compare_to(self, other):
return self.comparer(self.duetime, other.duetime)
def cancel(self):
"""Cancels the work item by disposing the resource returned by
invoke_core as soon as possible."""
self.disposable.dispose()
def is_cancelled(self):
return self.disposable.is_disposed
def invoke_core(self):
return self.action(self.scheduler, self.state)
def __lt__(self, other):
return self.compare_to(other) < 0
def __gt__(self, other):
return self.compare_to(other) > 0
def __eq__(self, other):
return self.compare_to(other) == 0
| from rx.core import Disposable
from rx.disposables import SingleAssignmentDisposable
def default_sub_comparer(x, y):
return 0 if x == y else 1 if x > y else -1
class ScheduledItem(object):
def __init__(self, scheduler, state, action, duetime, comparer=None):
self.scheduler = scheduler
self.state = state
self.action = action
self.duetime = duetime
self.comparer = comparer or default_sub_comparer
self.disposable = SingleAssignmentDisposable()
def invoke(self):
ret = self.action(self.scheduler, self.state)
if isinstance(ret, Disposable):
self.disposable.disposable = ret
def compare_to(self, other):
return self.comparer(self.duetime, other.duetime)
def cancel(self):
"""Cancels the work item by disposing the resource returned by
invoke_core as soon as possible."""
self.disposable.dispose()
def is_cancelled(self):
return self.disposable.is_disposed
def __lt__(self, other):
return self.compare_to(other) < 0
def __gt__(self, other):
return self.compare_to(other) > 0
def __eq__(self, other):
return self.compare_to(other) == 0
| Check if action returns disposable | Check if action returns disposable
| Python | mit | ReactiveX/RxPY,ReactiveX/RxPY | ---
+++
@@ -1,3 +1,4 @@
+from rx.core import Disposable
from rx.disposables import SingleAssignmentDisposable
@@ -15,7 +16,9 @@
self.disposable = SingleAssignmentDisposable()
def invoke(self):
- self.disposable.disposable = self.invoke_core()
+ ret = self.action(self.scheduler, self.state)
+ if isinstance(ret, Disposable):
+ self.disposable.disposable = ret
def compare_to(self, other):
return self.comparer(self.duetime, other.duetime)
@@ -29,9 +32,6 @@
def is_cancelled(self):
return self.disposable.is_disposed
- def invoke_core(self):
- return self.action(self.scheduler, self.state)
-
def __lt__(self, other):
return self.compare_to(other) < 0
|
840aef8fee59c9f1a9863177e060b05b09fcacd4 | tests/utils.py | tests/utils.py | # -*- coding: utf-8 -*-
def has_no_django():
try:
import django
return False
except ImportError:
return True
| # -*- coding: utf-8 -*-
def has_no_django():
try:
import django # noqa isort:skip
return False
except ImportError:
return True
| Add noqa to conditional import | Add noqa to conditional import
| Python | mit | python-thumbnails/python-thumbnails,relekang/python-thumbnails | ---
+++
@@ -3,7 +3,7 @@
def has_no_django():
try:
- import django
+ import django # noqa isort:skip
return False
except ImportError:
return True |
5ddde4a43ede87770543984e96eb8ccaf1d829b2 | lib/methods/drupalconsole.py | lib/methods/drupalconsole.py | from base import BaseMethod
from fabric.api import *
from lib.utils import SSHTunnel, RemoteSSHTunnel
from fabric.colors import green, red
from lib import configuration
import copy
class DrupalConsoleMethod(BaseMethod):
@staticmethod
def supports(methodName):
return methodName == 'drupalconsole'
def install(self, config):
with cd(config['tmpFolder']):
run('curl https://drupalconsole.com/installer -L -o drupal.phar')
run('mv drupal.phar /usr/local/bin/drupal')
run('chmod +x /usr/local/bin/drupal')
run('drupal init')
print green('Drupal Console installed successfully.')
def run_drupalconsole(self, config, command):
with cd(config['rootFolder']):
run('drupal %s' % command)
def drupalconsole(self, config, **kwargs):
if kwargs['command'] == 'install':
self.install(config)
return
self.run_drupalconsole(config, kwargs['command'])
| from base import BaseMethod
from fabric.api import *
from lib.utils import SSHTunnel, RemoteSSHTunnel
from fabric.colors import green, red
from lib import configuration
import copy
class DrupalConsoleMethod(BaseMethod):
@staticmethod
def supports(methodName):
return methodName == 'drupalconsole'
def install(self, config, **kwargs):
with cd(config['tmpFolder']):
run('curl https://drupalconsole.com/installer -L -o drupal.phar')
run('mv drupal.phar /usr/local/bin/drupal')
run('chmod +x /usr/local/bin/drupal')
run('drupal init')
print green('Drupal Console installed successfully.')
def run_drupalconsole(self, config, command):
with cd(config['rootFolder']):
run('drupal %s' % command)
def drupalconsole(self, config, **kwargs):
if kwargs['command'] == 'install':
self.install(config)
return
self.run_drupalconsole(config, kwargs['command'])
| Fix exception when running install-task | Fix exception when running install-task
| Python | mit | factorial-io/fabalicious,factorial-io/fabalicious | ---
+++
@@ -11,7 +11,7 @@
def supports(methodName):
return methodName == 'drupalconsole'
- def install(self, config):
+ def install(self, config, **kwargs):
with cd(config['tmpFolder']):
run('curl https://drupalconsole.com/installer -L -o drupal.phar')
run('mv drupal.phar /usr/local/bin/drupal') |
a667b3503b0434f01459bae2d29df800d95ba1c4 | gapipy/resources/tour/departure.py | gapipy/resources/tour/departure.py | from __future__ import unicode_literals
from ...models import Address, AddOn, DepartureRoom, PP2aPrice
from ..base import Product
from .tour_dossier import TourDossier
from .departure_component import DepartureComponent
class Departure(Product):
_resource_name = 'departures'
_is_listable = True
_is_parent_resource = True
_as_is_fields = [
'id', 'href', 'availability', 'flags', 'nearest_start_airport',
'nearest_finish_airport', 'product_line', 'sku', 'requirements',
]
_date_fields = ['start_date', 'finish_date']
_date_time_fields_utc = ['date_created', 'date_last_modified']
_date_time_fields_local = ['latest_arrival_time', 'earliest_departure_time']
_resource_fields = [('tour', 'Tour'), ('tour_dossier', TourDossier)]
_resource_collection_fields = [
('components', DepartureComponent),
]
_model_fields = [('start_address', Address), ('finish_address', Address)]
_model_collection_fields = [
('addons', AddOn),
('rooms', DepartureRoom),
('lowest_pp2a_prices', PP2aPrice),
]
_deprecated_fields = ['add_ons']
| from __future__ import unicode_literals
from ...models import Address, AddOn, DepartureRoom, PP2aPrice
from ..base import Product
from .tour_dossier import TourDossier
from .departure_component import DepartureComponent
class Departure(Product):
_resource_name = 'departures'
_is_listable = True
_is_parent_resource = True
_as_is_fields = [
'id', 'href', 'name', 'availability', 'flags', 'nearest_start_airport',
'nearest_finish_airport', 'product_line', 'sku', 'requirements',
]
_date_fields = ['start_date', 'finish_date']
_date_time_fields_utc = ['date_created', 'date_last_modified']
_date_time_fields_local = ['latest_arrival_time', 'earliest_departure_time']
_resource_fields = [('tour', 'Tour'), ('tour_dossier', TourDossier)]
_resource_collection_fields = [
('components', DepartureComponent),
]
_model_fields = [('start_address', Address), ('finish_address', Address)]
_model_collection_fields = [
('addons', AddOn),
('rooms', DepartureRoom),
('lowest_pp2a_prices', PP2aPrice),
]
_deprecated_fields = ['add_ons']
| Add name to Departure resource | Add name to Departure resource
| Python | mit | gadventures/gapipy | ---
+++
@@ -14,7 +14,7 @@
_is_parent_resource = True
_as_is_fields = [
- 'id', 'href', 'availability', 'flags', 'nearest_start_airport',
+ 'id', 'href', 'name', 'availability', 'flags', 'nearest_start_airport',
'nearest_finish_airport', 'product_line', 'sku', 'requirements',
]
_date_fields = ['start_date', 'finish_date'] |
ae1de4000a6e9f3fc70d14c6214038e83772a5f6 | Part2/main.py | Part2/main.py | import detectLang
import graph
# ====================================================================================================
# La detection est rapide car toute les perplexites sont stockées dans les fichiers binaires pp_EN etc
# Pour regénerer les fichiers :
# Executer detectLang.create_all_pp_and_save_to_disc()
# Les resultats des perplexités pour tous les fichiers seront serialisés dans les fichiers binaires.
# ces fichiers sont necessaire à la fontions detectLang.detect_language()
# Pour serialisé les dictionaires j'utilise la bibliotheque intégrer à python => pickle
# ====================================================================================================
test_file_number = 19
# detect_language(Numéro du fichier dans le repertoire de test, N du modele nGram)
# Print le code de la langue reconnue et la perplexité du modèle choisis.
detectLang.detect_language(test_file_number,1)
detectLang.detect_language(test_file_number,2)
detectLang.detect_language(test_file_number,3)
# ====================================================================================================
# Nécessite matplotlib
# Installé matplotlib avec => pip install matplotlib
# ====================================================================================================
# graphFile(Numéro du fichier dans le repertoire de test)
graph.graphFile(test_file_number)
| import detectLang
import graph
# ====================================================================================================
# La detection est rapide car toute les perplexites sont stockées dans les fichiers binaires pp_EN etc
# Pour regénerer les fichiers :
# Executer detectLang.create_all_pp_and_save_to_disc(), décommenter la ligne suivante
# detectLang.create_all_pp_and_save_to_disc()
# Les resultats des perplexités pour tous les fichiers seront serialisés dans les fichiers binaires.
# ces fichiers sont necessaire à la fontions detectLang.detect_language()
# Pour serialisé les dictionaires j'utilise la bibliotheque intégrer à python => pickle
# ====================================================================================================
test_file_number = 13
# detect_language(Numéro du fichier dans le repertoire de test, N du modele nGram)
# Print le code de la langue reconnue et la perplexité du modèle choisis.
# detectLang.detect_language(test_file_number,1)
# detectLang.detect_language(test_file_number,2)
# detectLang.detect_language(test_file_number,3)
# ====================================================================================================
# Nécessite matplotlib
# Installé matplotlib avec => pip install matplotlib
# ====================================================================================================
# Affiche sur un graphique les perplexité de tous les modeles sur un même fichier
# graph.graphFile(test_file_number)
# Pour donner le resultat sur tous les fichier test dans la console
detectLang.show_all_result()
| Update doc and add one call | Update doc and add one call
| Python | mit | Focom/NLPWork1,Focom/NLPWork1,Focom/NLPWork1 | ---
+++
@@ -4,25 +4,28 @@
# ====================================================================================================
# La detection est rapide car toute les perplexites sont stockées dans les fichiers binaires pp_EN etc
# Pour regénerer les fichiers :
-# Executer detectLang.create_all_pp_and_save_to_disc()
+# Executer detectLang.create_all_pp_and_save_to_disc(), décommenter la ligne suivante
+# detectLang.create_all_pp_and_save_to_disc()
# Les resultats des perplexités pour tous les fichiers seront serialisés dans les fichiers binaires.
# ces fichiers sont necessaire à la fontions detectLang.detect_language()
# Pour serialisé les dictionaires j'utilise la bibliotheque intégrer à python => pickle
# ====================================================================================================
-test_file_number = 19
+test_file_number = 13
# detect_language(Numéro du fichier dans le repertoire de test, N du modele nGram)
# Print le code de la langue reconnue et la perplexité du modèle choisis.
-detectLang.detect_language(test_file_number,1)
-detectLang.detect_language(test_file_number,2)
-detectLang.detect_language(test_file_number,3)
+# detectLang.detect_language(test_file_number,1)
+# detectLang.detect_language(test_file_number,2)
+# detectLang.detect_language(test_file_number,3)
# ====================================================================================================
# Nécessite matplotlib
# Installé matplotlib avec => pip install matplotlib
# ====================================================================================================
-# graphFile(Numéro du fichier dans le repertoire de test)
-graph.graphFile(test_file_number)
+# Affiche sur un graphique les perplexité de tous les modeles sur un même fichier
+# graph.graphFile(test_file_number)
+# Pour donner le resultat sur tous les fichier test dans la console
+detectLang.show_all_result() |
19cb68209252615c66cee0a1c6df1069f81f6f77 | stock_request_picking_type/models/stock_request_order.py | stock_request_picking_type/models/stock_request_order.py | # Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockRequestOrder(models.Model):
_inherit = 'stock.request.order'
@api.model
def _get_default_picking_type(self):
return self.env['stock.picking.type'].search([
('code', '=', 'stock_request_order'),
('warehouse_id.company_id', 'in',
[self.env.context.get('company_id', self.env.user.company_id.id),
False])],
limit=1).id
picking_type_id = fields.Many2one(
'stock.picking.type', 'Operation Type',
default=_get_default_picking_type, required=True)
@api.onchange('warehouse_id')
def onchange_warehouse_picking_id(self):
if self.warehouse_id:
picking_type_id = self.env['stock.picking.type'].\
search([('code', '=', 'stock_request_order'),
('warehouse_id', '=', self.warehouse_id.id)], limit=1)
if picking_type_id:
self._origin.write({'picking_type_id': picking_type_id.id})
| # Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockRequestOrder(models.Model):
_inherit = 'stock.request.order'
@api.model
def _get_default_picking_type(self):
return self.env['stock.picking.type'].search([
('code', '=', 'stock_request_order'),
('warehouse_id.company_id', 'in',
[self.env.context.get('company_id', self.env.user.company_id.id),
False])],
limit=1).id
picking_type_id = fields.Many2one(
'stock.picking.type', 'Operation Type',
default=_get_default_picking_type, required=True)
@api.onchange('warehouse_id')
def onchange_warehouse_picking_id(self):
if self.warehouse_id:
picking_type_id = self.env['stock.picking.type'].\
search([('code', '=', 'stock_request_order'),
('warehouse_id', '=', self.warehouse_id.id)], limit=1)
if picking_type_id:
self._origin.write({'picking_type_id': picking_type_id.id})
@api.model
def create(self, vals):
if vals.get('warehouse_id', False):
picking_type_id = self.env['stock.picking.type'].\
search([('code', '=', 'stock_request_order'),
('warehouse_id', '=', vals['warehouse_id'])], limit=1)
if picking_type_id:
vals.update({'picking_type_id': picking_type_id.id})
return super().create(vals)
| Set Picking Type in Create | [IMP] Set Picking Type in Create
[IMP] Flake8
| Python | agpl-3.0 | Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse | ---
+++
@@ -28,3 +28,13 @@
('warehouse_id', '=', self.warehouse_id.id)], limit=1)
if picking_type_id:
self._origin.write({'picking_type_id': picking_type_id.id})
+
+ @api.model
+ def create(self, vals):
+ if vals.get('warehouse_id', False):
+ picking_type_id = self.env['stock.picking.type'].\
+ search([('code', '=', 'stock_request_order'),
+ ('warehouse_id', '=', vals['warehouse_id'])], limit=1)
+ if picking_type_id:
+ vals.update({'picking_type_id': picking_type_id.id})
+ return super().create(vals) |
b6139583bf5074c73c0de6626391b6f128ed6e34 | export_jars.py | export_jars.py | #!/usr/bin/env python
import os
import shutil
from glob import glob
from subprocess import call, check_output
OUTPUT_DIR_NAME = 'jars'
def call_unsafe(*args, **kwargs):
kwargs['shell'] = True
call(*args, **kwargs)
call_unsafe('./gradlew clean javadocRelease jarRelease')
try:
os.mkdir(OUTPUT_DIR_NAME)
except OSError:
pass
os.chdir(OUTPUT_DIR_NAME)
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7]
for src in glob('*.jar'):
name, ext = os.path.splitext(src)
dest = name + '-' + commit + ext
shutil.move(src, dest)
call_unsafe('open .')
| #!/usr/bin/env python
import os
import shutil
from glob import glob
from subprocess import call, check_output
OUTPUT_DIR_NAME = 'jars'
def call_unsafe(*args, **kwargs):
kwargs['shell'] = True
call(*args, **kwargs)
call_unsafe('./gradlew clean javadocRelease jarRelease')
try:
os.mkdir(OUTPUT_DIR_NAME)
except OSError:
pass
os.chdir(OUTPUT_DIR_NAME)
call_unsafe('rm *.jar')
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7]
for src in glob('*.jar'):
name, ext = os.path.splitext(src)
dest = name + '-' + commit + ext
shutil.move(src, dest)
call_unsafe('open .')
| Remove existing JARs before building new ones | Remove existing JARs before building new ones
| Python | mit | swstack/Bean-Android-SDK,PunchThrough/bean-sdk-android,colus001/Bean-Android-SDK,PunchThrough/Bean-Android-SDK,hongbinz/Bean-Android-SDK,androidgrl/Bean-Android-SDK,PunchThrough/Bean-Android-SDK,swstack/Bean-Android-SDK,PunchThrough/bean-sdk-android,androidgrl/Bean-Android-SDK,hongbinz/Bean-Android-SDK,colus001/Bean-Android-SDK | ---
+++
@@ -21,6 +21,7 @@
pass
os.chdir(OUTPUT_DIR_NAME)
+call_unsafe('rm *.jar')
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7] |
1b7a3f045bf7a23ef993d136b481f22258c4a778 | wagtail/wagtailimages/rich_text.py | wagtail/wagtailimages/rich_text.py | from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import get_image_format
class ImageEmbedHandler(object):
"""
ImageEmbedHandler will be invoked whenever we encounter an element in HTML content
with an attribute of data-embedtype="image". The resulting element in the database
representation will be:
<embed embedtype="image" id="42" format="thumb" alt="some custom alt text">
"""
@staticmethod
def get_db_attributes(tag):
"""
Given a tag that we've identified as an image embed (because it has a
data-embedtype="image" attribute), return a dict of the attributes we should
have on the resulting <embed> element.
"""
return {
'id': tag['data-id'],
'format': tag['data-format'],
'alt': tag['data-alt'],
}
@staticmethod
def expand_db_attributes(attrs, for_editor):
"""
Given a dict of attributes from the <embed> tag, return the real HTML
representation.
"""
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
image_format = get_image_format(attrs['format'])
if for_editor:
try:
return image_format.image_to_editor_html(image, attrs['alt'])
except:
return ''
else:
return image_format.image_to_html(image, attrs['alt'])
except Image.DoesNotExist:
return "<img>"
| from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import get_image_format
class ImageEmbedHandler(object):
"""
ImageEmbedHandler will be invoked whenever we encounter an element in HTML content
with an attribute of data-embedtype="image". The resulting element in the database
representation will be:
<embed embedtype="image" id="42" format="thumb" alt="some custom alt text">
"""
@staticmethod
def get_db_attributes(tag):
"""
Given a tag that we've identified as an image embed (because it has a
data-embedtype="image" attribute), return a dict of the attributes we should
have on the resulting <embed> element.
"""
return {
'id': tag['data-id'],
'format': tag['data-format'],
'alt': tag['data-alt'],
}
@staticmethod
def expand_db_attributes(attrs, for_editor):
"""
Given a dict of attributes from the <embed> tag, return the real HTML
representation.
"""
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
except Image.DoesNotExist:
return "<img>"
image_format = get_image_format(attrs['format'])
if for_editor:
try:
return image_format.image_to_editor_html(image, attrs['alt'])
except:
return ''
else:
return image_format.image_to_html(image, attrs['alt'])
| Refactor try-catch block by limiting code in the try block | Refactor try-catch block by limiting code in the try block
Always good to know which line will raise an exception and limit the try block to that statement
| Python | bsd-3-clause | Toshakins/wagtail,timorieber/wagtail,nrsimha/wagtail,kurtrwall/wagtail,timorieber/wagtail,FlipperPA/wagtail,inonit/wagtail,davecranwell/wagtail,nealtodd/wagtail,iansprice/wagtail,thenewguy/wagtail,nutztherookie/wagtail,jnns/wagtail,kaedroho/wagtail,iansprice/wagtail,serzans/wagtail,inonit/wagtail,kurtw/wagtail,mixxorz/wagtail,takeflight/wagtail,jnns/wagtail,serzans/wagtail,kaedroho/wagtail,Toshakins/wagtail,wagtail/wagtail,mixxorz/wagtail,thenewguy/wagtail,chrxr/wagtail,thenewguy/wagtail,JoshBarr/wagtail,quru/wagtail,FlipperPA/wagtail,timorieber/wagtail,nimasmi/wagtail,Tivix/wagtail,nutztherookie/wagtail,wagtail/wagtail,mixxorz/wagtail,Tivix/wagtail,thenewguy/wagtail,nealtodd/wagtail,hamsterbacke23/wagtail,kaedroho/wagtail,gasman/wagtail,hamsterbacke23/wagtail,mikedingjan/wagtail,davecranwell/wagtail,nrsimha/wagtail,gogobook/wagtail,chrxr/wagtail,nimasmi/wagtail,zerolab/wagtail,timorieber/wagtail,mayapurmedia/wagtail,kurtw/wagtail,chrxr/wagtail,wagtail/wagtail,gasman/wagtail,nilnvoid/wagtail,wagtail/wagtail,nealtodd/wagtail,zerolab/wagtail,nimasmi/wagtail,jnns/wagtail,chrxr/wagtail,zerolab/wagtail,takeflight/wagtail,quru/wagtail,inonit/wagtail,nilnvoid/wagtail,rsalmaso/wagtail,mikedingjan/wagtail,nrsimha/wagtail,nutztherookie/wagtail,Tivix/wagtail,hanpama/wagtail,rsalmaso/wagtail,gogobook/wagtail,inonit/wagtail,mayapurmedia/wagtail,JoshBarr/wagtail,iansprice/wagtail,jnns/wagtail,Toshakins/wagtail,quru/wagtail,nilnvoid/wagtail,kurtrwall/wagtail,nutztherookie/wagtail,hamsterbacke23/wagtail,quru/wagtail,gasman/wagtail,FlipperPA/wagtail,kurtrwall/wagtail,hamsterbacke23/wagtail,serzans/wagtail,FlipperPA/wagtail,JoshBarr/wagtail,rsalmaso/wagtail,Toshakins/wagtail,mayapurmedia/wagtail,thenewguy/wagtail,mikedingjan/wagtail,hanpama/wagtail,mixxorz/wagtail,mikedingjan/wagtail,torchbox/wagtail,gasman/wagtail,zerolab/wagtail,mayapurmedia/wagtail,mixxorz/wagtail,gogobook/wagtail,rsalmaso/wagtail,hanpama/wagtail,serzans/wagtail,gogobook/wagtail,takefligh
t/wagtail,torchbox/wagtail,torchbox/wagtail,zerolab/wagtail,torchbox/wagtail,wagtail/wagtail,kaedroho/wagtail,JoshBarr/wagtail,rsalmaso/wagtail,gasman/wagtail,Tivix/wagtail,takeflight/wagtail,nrsimha/wagtail,iansprice/wagtail,hanpama/wagtail,nealtodd/wagtail,kurtrwall/wagtail,nimasmi/wagtail,nilnvoid/wagtail,kurtw/wagtail,davecranwell/wagtail,kurtw/wagtail,kaedroho/wagtail,davecranwell/wagtail | ---
+++
@@ -31,15 +31,14 @@
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
- image_format = get_image_format(attrs['format'])
-
- if for_editor:
- try:
- return image_format.image_to_editor_html(image, attrs['alt'])
- except:
- return ''
- else:
- return image_format.image_to_html(image, attrs['alt'])
-
except Image.DoesNotExist:
return "<img>"
+
+ image_format = get_image_format(attrs['format'])
+ if for_editor:
+ try:
+ return image_format.image_to_editor_html(image, attrs['alt'])
+ except:
+ return ''
+ else:
+ return image_format.image_to_html(image, attrs['alt']) |
7e1ec1b27d69882005ac5492809c8847c21e2198 | baro.py | baro.py | from datetime import datetime
class Baro:
"""This class represents a Baro item and is initialized with
data in JSON format
"""
def __init__(self, data):
self.config = data['Config']
self.start = datetime.fromtimestamp(data['Activation']['sec'])
self.end = datetime.fromtimestamp(data['Expiry']['sec'])
self.location = data['Node']
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro offer
"""
baroItemString = ""
if datetime.now() < self.start:
return "None"
else:
for item in self.manifest:
baroItemString += ('== '+ str(item["ItemType"]) +' ==\n'
'- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' )
return baroItemString
def get_eta_string(self):
"""Returns a string containing the Baro's ETA
"""
seconds = int((self.end - datetime.now()).total_seconds())
return '{} days, {} hrs, {} mins'.format((seconds // 86400), ((seconds % 86400) // 3600),
(seconds % 3600) // 60)
def get_start_string(self):
"""Returns a string containing the Baro's start
"""
seconds = int((self.start - datetime.now()).total_seconds())
return '{} days, {} hrs, {} mins'.format((seconds // 86400), ((seconds % 86400) // 3600),
(seconds % 3600) // 60) | from datetime import datetime
import utils
class Baro:
"""This class contains info about the Void Trader and is initialized with
data in JSON format
"""
def __init__(self, data):
self.config = data['Config']
self.start = datetime.fromtimestamp(data['Activation']['sec'])
self.end = datetime.fromtimestamp(data['Expiry']['sec'])
self.location = data['Node']
self.manifest = data['Manifest']
def __str__(self):
"""Returns a string with all the information about Baro's offers
"""
baroItemString = ""
if datetime.now() < self.start:
return "None"
else:
for item in self.manifest:
baroItemString += ('== '+ str(item["ItemType"]) +' ==\n'
'- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' )
return baroItemString
def get_end_string(self):
"""Returns a string containing Baro's departure time
"""
return timedelta_to_string(self.end - datetime.now())
def get_start_string(self):
"""Returns a string containing Baro's arrival time
"""
return timedelta_to_string(self.start - datetime.now())
| Change class Baro to use timedelta_to_string, some fixes | Change class Baro to use timedelta_to_string, some fixes
| Python | mit | pabletos/Hubot-Warframe,pabletos/Hubot-Warframe | ---
+++
@@ -1,8 +1,9 @@
from datetime import datetime
+import utils
class Baro:
- """This class represents a Baro item and is initialized with
+ """This class contains info about the Void Trader and is initialized with
data in JSON format
"""
@@ -15,7 +16,7 @@
self.manifest = data['Manifest']
def __str__(self):
- """Returns a string with all the information about Baro offer
+ """Returns a string with all the information about Baro's offers
"""
baroItemString = ""
@@ -30,18 +31,14 @@
return baroItemString
- def get_eta_string(self):
- """Returns a string containing the Baro's ETA
+ def get_end_string(self):
+ """Returns a string containing Baro's departure time
"""
- seconds = int((self.end - datetime.now()).total_seconds())
- return '{} days, {} hrs, {} mins'.format((seconds // 86400), ((seconds % 86400) // 3600),
- (seconds % 3600) // 60)
+ return timedelta_to_string(self.end - datetime.now())
def get_start_string(self):
- """Returns a string containing the Baro's start
+ """Returns a string containing Baro's arrival time
"""
- seconds = int((self.start - datetime.now()).total_seconds())
- return '{} days, {} hrs, {} mins'.format((seconds // 86400), ((seconds % 86400) // 3600),
- (seconds % 3600) // 60)
+ return timedelta_to_string(self.start - datetime.now()) |
12cd87394e09e7481b39ca519f15db4688ab0073 | tmpl/Prompt.py | tmpl/Prompt.py | #--coding:utf-8--
#Platform
class BasePrompt(object):
pass
class ErrPrompt(BasePrompt):
"""
Define some of Err Prompts
Usually print to sys.stderr
"""
def PrintErr(self, content):
import sys
"""
Automous write content to sys.stderr and add '\n' to the end
"""
sys.stderr.write(content + '\n')
sys.stderr.flush()
def IllegalURL(self):
"""
For URL with illegal characters
"""
self.PrintErr("Error: URL include illegal characters!")
def FileExist(self, File = "File"):
"""
return True if want to replace, and False for the other
"""
self.PrintErr("Warning: '%s' already exists, replace?(y/n)" %(Files))
tmp = raw_input()
if tmp == 'y' or tmp == 'Y':
return True
return False
def Exit(self):
self.PrintErr("Info: Terminated")
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
| #--coding:utf-8--
#Platform
class BasePrompt(object):
pass
class ErrPrompt(BasePrompt):
"""
Define some of Err Prompts
Usually print to sys.stderr
"""
def PrintErr(self, content):
import sys
"""
Automous write content to sys.stderr and add '\n' to the end
"""
sys.stderr.write(content + '\n')
sys.stderr.flush()
def IllegalURL(self):
"""
For URL with illegal characters
"""
self.PrintErr("Error: URL include illegal characters!")
def FileExist(self, File = "File"):
"""
return True if want to replace, and False for the other
"""
self.PrintErr("Warning: '%s' already exists, replace?(y/n)" %(File))
tmp = raw_input()
if tmp == 'y' or tmp == 'Y':
return True
return False
def Exit(self):
self.PrintErr("Info: Terminated")
if __name__ == '__main__':
raise EnvironmentError ("DO NOT DIRECTLY RUN THIS TEMPLATE!")
| Fix Bugs in module when using -O option | Fix Bugs in module when using -O option
| Python | mit | nday-dev/Spider-Framework | ---
+++
@@ -27,7 +27,7 @@
"""
return True if want to replace, and False for the other
"""
- self.PrintErr("Warning: '%s' already exists, replace?(y/n)" %(Files))
+ self.PrintErr("Warning: '%s' already exists, replace?(y/n)" %(File))
tmp = raw_input()
if tmp == 'y' or tmp == 'Y':
return True |
d3078cafd4e64e9c093d9d823df2035b8380d643 | meta-refkit-computervision/recipes-computervision/caffe-bvlc-reference/files/dnn-test.py | meta-refkit-computervision/recipes-computervision/caffe-bvlc-reference/files/dnn-test.py | #!/usr/bin/env python3
# Classify an image using a suitable model. The image conversion magic
# is from
# https://github.com/opencv/opencv_contrib/blob/master/modules/dnn/samples/googlenet_python.py
# (3-clause BSD license).
import numpy as np
import cv2
import sys
if len(sys.argv) != 4:
print("Usage: dnn.py <prototxt> <caffemodel> <image>")
sys.exit(1)
cv2.ocl.setUseOpenCL(False)
# read the image
test_img = cv2.imread(sys.argv[3])
# resize
resized = cv2.resize(test_img, (224,224))
converted = np.moveaxis(resized, 2, 0)
data = np.reshape(converted.astype(np.float32), (-1, 3, 224, 224))
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
net.setBlob(".data", data)
# run the network
net.forward()
# print the class
print(str(net.getBlob("prob").argmax()))
| #!/usr/bin/env python3
# Classify an image using a suitable model. The image conversion magic
# is from
# https://github.com/opencv/opencv_contrib/blob/master/modules/dnn/samples/googlenet_python.py
# (3-clause BSD license).
import numpy as np
import cv2
import sys
if len(sys.argv) != 4:
print("Usage: dnn.py <prototxt> <caffemodel> <image>")
sys.exit(1)
cv2.ocl.setUseOpenCL(False)
# read the image
test_img = cv2.imread(sys.argv[3])
# resize
resized = cv2.resize(test_img, (224,224))
converted = np.moveaxis(resized, 2, 0)
data = np.reshape(converted.astype(np.float32), (-1, 3, 224, 224))
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
net.setInput(data, "data")
# run the network
result = net.forward("prob")
# print the class
print(str(result.argmax()))
| Fix DNN test to be compatible with OpenCV 3.3. | convnet: Fix DNN test to be compatible with OpenCV 3.3.
OpenCV DNN module API changed with OpenCV 3.3. Fix the tests to use the
new API.
Signed-off-by: Ismo Puustinen <75dda586a9213f0e0695eb79120c94222bb30e60@intel.com>
| Python | mit | intel/intel-iot-refkit,mythi/intel-iot-refkit,mythi/intel-iot-refkit,intel/intel-iot-refkit,intel/intel-iot-refkit,intel/intel-iot-refkit,mythi/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,klihub/intel-iot-refkit,mythi/intel-iot-refkit,mythi/intel-iot-refkit,klihub/intel-iot-refkit,mythi/intel-iot-refkit,intel/intel-iot-refkit,intel/intel-iot-refkit,mythi/intel-iot-refkit,intel/intel-iot-refkit | ---
+++
@@ -25,10 +25,10 @@
# initialize network
net = cv2.dnn.readNetFromCaffe(sys.argv[1], sys.argv[2])
-net.setBlob(".data", data)
+net.setInput(data, "data")
# run the network
-net.forward()
+result = net.forward("prob")
# print the class
-print(str(net.getBlob("prob").argmax()))
+print(str(result.argmax())) |
210be14772b403e8fb5938e4e2cd391d43275ab1 | tests/test_ot_propagators.py | tests/test_ot_propagators.py | import instana.http_propagator as ihp
import opentracing as ot
from instana import tracer, options, util
from nose.tools import assert_equals
import inspect
def test_basics():
inspect.isclass(ihp.HTTPPropagator)
inject_func = getattr(ihp.HTTPPropagator, "inject", None)
assert inject_func
assert inspect.ismethod(inject_func)
extract_func = getattr(ihp.HTTPPropagator, "extract", None)
assert extract_func
assert inspect.ismethod(extract_func)
def test_inject():
opts = options.Options()
ot.global_tracer = tracer.InstanaTracer(opts)
carrier = {}
span = ot.global_tracer.start_span("nosetests")
ot.global_tracer.inject(span.context, ot.Format.HTTP_HEADERS, carrier)
assert 'X-Instana-T' in carrier
assert_equals(carrier['X-Instana-T'], util.id_to_header(span.context.trace_id))
assert 'X-Instana-S' in carrier
assert_equals(carrier['X-Instana-S'], util.id_to_header(span.context.span_id))
assert 'X-Instana-L' in carrier
assert_equals(carrier['X-Instana-L'], "1")
| import instana.http_propagator as ihp
import opentracing as ot
from instana import tracer, options, util
from nose.tools import assert_equals
import inspect
def test_basics():
inspect.isclass(ihp.HTTPPropagator)
inject_func = getattr(ihp.HTTPPropagator, "inject", None)
assert inject_func
assert callable(inject_func)
extract_func = getattr(ihp.HTTPPropagator, "extract", None)
assert extract_func
assert callable(extract_func)
def test_inject():
opts = options.Options()
ot.global_tracer = tracer.InstanaTracer(opts)
carrier = {}
span = ot.global_tracer.start_span("nosetests")
ot.global_tracer.inject(span.context, ot.Format.HTTP_HEADERS, carrier)
assert 'X-Instana-T' in carrier
assert_equals(carrier['X-Instana-T'], util.id_to_header(span.context.trace_id))
assert 'X-Instana-S' in carrier
assert_equals(carrier['X-Instana-S'], util.id_to_header(span.context.span_id))
assert 'X-Instana-L' in carrier
assert_equals(carrier['X-Instana-L'], "1")
| Fix function test to work on both Py 2 + 3 | Fix function test to work on both Py 2 + 3
| Python | mit | instana/python-sensor,instana/python-sensor | ---
+++
@@ -10,11 +10,11 @@
inject_func = getattr(ihp.HTTPPropagator, "inject", None)
assert inject_func
- assert inspect.ismethod(inject_func)
+ assert callable(inject_func)
extract_func = getattr(ihp.HTTPPropagator, "extract", None)
assert extract_func
- assert inspect.ismethod(extract_func)
+ assert callable(extract_func)
def test_inject(): |
ef94948a8ce16d9d80fb69950381e0936a462bb0 | tests/config_tests.py | tests/config_tests.py | from nose.tools import assert_equal
from wunderapi.config import Config
def setup():
return Config(config_file="tests/resources/test_config")
def test_parse_config_with_correct_parms():
pass
def test_parse_config_with_incorrect_parms():
pass
def test_config_created_with_default_parms():
config = setup()
config.parse_config()
assert_equal(config.api_key, 'API Key')
assert_equal(config.location, 'Zipcode')
assert_equal(config.date_format, 'date')
assert_equal(config.units, 'english')
| from nose.tools import assert_equal
from wunderapi.config import Config
def setup():
return Config(config_file="tests/resources/test_config")
def test_parse_config_with_correct_parms():
pass
def test_parse_config_with_incorrect_parms():
pass
def test_config_created_with_default_parms():
config = setup()
config.parse_config()
assert_equal(config.date_format, 'date')
assert_equal(config.units, 'english')
| Update test to get api_key from environment | Update test to get api_key from environment
| Python | mit | paris3200/Weather,paris3200/wunderapi | ---
+++
@@ -17,7 +17,5 @@
def test_config_created_with_default_parms():
config = setup()
config.parse_config()
- assert_equal(config.api_key, 'API Key')
- assert_equal(config.location, 'Zipcode')
assert_equal(config.date_format, 'date')
assert_equal(config.units, 'english') |
0529c392c8c3e75a03aa312e4fc7b367008fdf27 | tests/test_20_main.py | tests/test_20_main.py |
import click.testing
import pytest
from cfgrib import __main__
def test_main():
runner = click.testing.CliRunner()
res = runner.invoke(__main__.cfgrib_cli, ['selfcheck'])
assert res.exit_code == 0
assert 'Your system is ready.' in res.output
res = runner.invoke(__main__.cfgrib_cli, ['non-existent-command'])
assert res.exit_code == 2
|
import click.testing
from cfgrib import __main__
def test_main():
runner = click.testing.CliRunner()
res = runner.invoke(__main__.cfgrib_cli, ['selfcheck'])
assert res.exit_code == 0
assert 'Your system is ready.' in res.output
res = runner.invoke(__main__.cfgrib_cli, ['non-existent-command'])
assert res.exit_code == 2
| Fix docs and CLI tests. | Fix docs and CLI tests.
| Python | apache-2.0 | ecmwf/cfgrib | ---
+++
@@ -1,6 +1,5 @@
import click.testing
-import pytest
from cfgrib import __main__
|
c5a0d0c5bf578a2221322c068a41ce6331b84c9b | tests/test_cattery.py | tests/test_cattery.py | import pytest
from catinabox import cattery
class TestCattery(object):
###########################################################################
# add_cats
###########################################################################
def test__add_cats__succeeds(self):
c = cattery.Cattery()
assert c
###########################################################################
# remove_cat
###########################################################################
def test__remove_cat__succeeds(self):
c = cattery.Cattery()
assert c
def test__remove_cat__no_cats__fails(self):
c = cattery.Cattery()
assert c
def test__remove_cat__cat_not_in_cattery__fails(self):
c = cattery.Cattery()
c.add_cats(["Fluffy"])
with pytest.raises(cattery.CatNotFound):
c.remove_cat("Snookums")
| import pytest
from catinabox import cattery, mccattery
@pytest.fixture(params=[
cattery.Cattery,
mccattery.McCattery
])
def cattery_fixture(request):
return request.param()
###########################################################################
# add_cats
###########################################################################
def test__add_cats__succeeds(cattery_fixture):
cattery_fixture.add_cats(["Fluffy", "Snookums"])
assert cattery_fixture.cats == ["Fluffy", "Snookums"]
assert cattery_fixture.num_cats == 2
###########################################################################
# remove_cat
###########################################################################
def test__remove_cat__succeeds(cattery_fixture):
cattery_fixture.add_cats(["Fluffy", "Junior"])
cattery_fixture.remove_cat("Fluffy")
assert cattery_fixture.cats == ["Junior"]
assert cattery_fixture.num_cats == 1
def test__remove_cat__no_cats__fails(cattery_fixture):
with pytest.raises(cattery.CatNotFound):
cattery_fixture.remove_cat("Fluffles")
def test__remove_cat__cat_not_in_cattery__fails(cattery_fixture):
cattery_fixture.add_cats(["Fluffy"])
with pytest.raises(cattery.CatNotFound):
cattery_fixture.remove_cat("Snookums")
| Add full tests for mccattery and cattery | Add full tests for mccattery and cattery
| Python | mit | keeppythonweird/catinabox,indexOutOfBound5/catinabox | ---
+++
@@ -1,32 +1,42 @@
import pytest
-from catinabox import cattery
+from catinabox import cattery, mccattery
-class TestCattery(object):
+@pytest.fixture(params=[
+ cattery.Cattery,
+ mccattery.McCattery
+])
+def cattery_fixture(request):
+ return request.param()
- ###########################################################################
- # add_cats
- ###########################################################################
- def test__add_cats__succeeds(self):
- c = cattery.Cattery()
- assert c
+###########################################################################
+# add_cats
+###########################################################################
- ###########################################################################
- # remove_cat
- ###########################################################################
+def test__add_cats__succeeds(cattery_fixture):
+ cattery_fixture.add_cats(["Fluffy", "Snookums"])
+ assert cattery_fixture.cats == ["Fluffy", "Snookums"]
+ assert cattery_fixture.num_cats == 2
- def test__remove_cat__succeeds(self):
- c = cattery.Cattery()
- assert c
+###########################################################################
+# remove_cat
+###########################################################################
- def test__remove_cat__no_cats__fails(self):
- c = cattery.Cattery()
- assert c
+def test__remove_cat__succeeds(cattery_fixture):
+ cattery_fixture.add_cats(["Fluffy", "Junior"])
+ cattery_fixture.remove_cat("Fluffy")
+ assert cattery_fixture.cats == ["Junior"]
+ assert cattery_fixture.num_cats == 1
- def test__remove_cat__cat_not_in_cattery__fails(self):
- c = cattery.Cattery()
- c.add_cats(["Fluffy"])
- with pytest.raises(cattery.CatNotFound):
- c.remove_cat("Snookums")
+
+def test__remove_cat__no_cats__fails(cattery_fixture):
+ with pytest.raises(cattery.CatNotFound):
+ cattery_fixture.remove_cat("Fluffles")
+
+
+def test__remove_cat__cat_not_in_cattery__fails(cattery_fixture):
+ cattery_fixture.add_cats(["Fluffy"])
+ with pytest.raises(cattery.CatNotFound):
+ cattery_fixture.remove_cat("Snookums") |
e79f87121a955847af92d93fad3e2687fc4f472f | tests/test_gen_sql.py | tests/test_gen_sql.py | #!/usr/bin/env python
class TestGenSql:
def test_gen_drop_statement(self):
pass
def test_create_statement(self):
pass
| #!/usr/bin/env python
import sys
from io import StringIO
from pg_bawler import gen_sql
def test_simple_main(monkeypatch):
stdout = StringIO()
monkeypatch.setattr(sys, 'stdout', stdout)
class Args:
tablename = 'foo'
gen_sql.main(*[Args.tablename])
sql = stdout.getvalue()
assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
| Add simple test for sql full sql generation with only tablename | Add simple test for sql full sql generation with only tablename
| Python | bsd-3-clause | beezz/pg_bawler,beezz/pg_bawler | ---
+++
@@ -1,10 +1,19 @@
#!/usr/bin/env python
+import sys
+from io import StringIO
+
+from pg_bawler import gen_sql
-class TestGenSql:
+def test_simple_main(monkeypatch):
+ stdout = StringIO()
+ monkeypatch.setattr(sys, 'stdout', stdout)
- def test_gen_drop_statement(self):
- pass
+ class Args:
+ tablename = 'foo'
- def test_create_statement(self):
- pass
+ gen_sql.main(*[Args.tablename])
+ sql = stdout.getvalue()
+
+ assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql
+ assert gen_sql.TRIGGER_FN_FMT.format(args=Args) in sql |
d60116aecbb6935fae508c94905a335fdb0603bb | tests/test_xgboost.py | tests/test_xgboost.py | import unittest
from sklearn import datasets
from xgboost import XGBClassifier
class TestXGBoost(unittest.TestCase):
def test_classifier(self):
boston = datasets.load_boston()
X, y = boston.data, boston.target
xgb1 = XGBClassifier(n_estimators=3)
xgb1.fit(X[0:70],y[0:70])
| import unittest
import xgboost
from distutils.version import StrictVersion
from sklearn import datasets
from xgboost import XGBClassifier
class TestXGBoost(unittest.TestCase):
def test_version(self):
# b/175051617 prevent xgboost version downgrade.
self.assertGreaterEqual(StrictVersion(xgboost.__version__), StrictVersion("1.2.1"))
def test_classifier(self):
boston = datasets.load_boston()
X, y = boston.data, boston.target
xgb1 = XGBClassifier(n_estimators=3)
xgb1.fit(X[0:70],y[0:70])
| Add xgboost version regression test. | Add xgboost version regression test.
BUG=175051617
| Python | apache-2.0 | Kaggle/docker-python,Kaggle/docker-python | ---
+++
@@ -1,9 +1,16 @@
import unittest
+import xgboost
+
+from distutils.version import StrictVersion
from sklearn import datasets
from xgboost import XGBClassifier
class TestXGBoost(unittest.TestCase):
+ def test_version(self):
+ # b/175051617 prevent xgboost version downgrade.
+ self.assertGreaterEqual(StrictVersion(xgboost.__version__), StrictVersion("1.2.1"))
+
def test_classifier(self):
boston = datasets.load_boston()
X, y = boston.data, boston.target |
a57e40ea7b0cc55ec67664d9f32658085c24900f | tools/project/check_style.py | tools/project/check_style.py | import subprocess
import sys
git_diff_output = subprocess.check_output(
"git diff --name-only --diff-filter=ACM", universal_newlines=True)
git_diff_lines = git_diff_output.split("\n")
for file_name in git_diff_lines:
if not file_name:
continue
print "Checking style for %s" %file_name
ret_value = subprocess.call(
"python 3rdparty/styleguide/cpplint/cpplint.py --filter=%s %s"
%("-build/include_order", file_name), stderr=subprocess.STDOUT)
if ret_value != 0:
sys.exit(ret_value)
sys.exit(0)
| import subprocess
import sys

# Collect paths of files added/copied/modified in the working tree.
# NOTE(review): Python 2 print statements below — this script cannot run on
# Python 3 as written.
git_diff_output = subprocess.check_output(
    "git diff --name-only --diff-filter=ACM", universal_newlines=True)
git_diff_lines = git_diff_output.split("\n")
for file_name in git_diff_lines:
    # split() leaves a trailing empty string; skip blanks.
    if not file_name:
        continue
    print "Checking style for %s" %file_name
    # Run cpplint on the changed file, ignoring include-order and copyright checks.
    # NOTE(review): passing the command as a single string without shell=True
    # only works on Windows — confirm the intended platform.
    ret_value = subprocess.call(
        "python 3rdparty/styleguide/cpplint/cpplint.py --filter="
        "-build/include_order,-legal/copyright %s"
        %file_name, stderr=subprocess.STDOUT)
    # Fail fast on the first file with style violations.
    if ret_value != 0:
        sys.exit(ret_value)
sys.exit(0)
| Update style checker options to ignore copyright. | Update style checker options to ignore copyright.
| Python | mit | damlaren/ogle,damlaren/ogle,damlaren/ogle | ---
+++
@@ -10,8 +10,9 @@
continue
print "Checking style for %s" %file_name
ret_value = subprocess.call(
- "python 3rdparty/styleguide/cpplint/cpplint.py --filter=%s %s"
- %("-build/include_order", file_name), stderr=subprocess.STDOUT)
+ "python 3rdparty/styleguide/cpplint/cpplint.py --filter="
+ "-build/include_order,-legal/copyright %s"
+ %file_name, stderr=subprocess.STDOUT)
if ret_value != 0:
sys.exit(ret_value)
sys.exit(0) |
800706f5835293ee20dd9505d1d11c28eb38bbb2 | tests/shipane_sdk/matchers/dataframe_matchers.py | tests/shipane_sdk/matchers/dataframe_matchers.py | # -*- coding: utf-8 -*-
import re
from hamcrest.core.base_matcher import BaseMatcher
class HasColumn(BaseMatcher):
def __init__(self, column):
self._column = column
def _matches(self, df):
return self._column in df.columns
def describe_to(self, description):
description.append_text(u'Dataframe doesn\'t have colum [{0}]'.format(self._column))
def has_column(column):
return HasColumn(column)
class HasColumnMatches(BaseMatcher):
def __init__(self, column_pattern):
self._column_pattern = re.compile(column_pattern)
def _matches(self, df):
return df.filter(regex=self._column_pattern).columns.size > 0
def describe_to(self, description):
description.append_text(u'Dataframe doesn\'t have colum matches [{0}]'.format(self._column_pattern))
def has_column_matches(column_pattern):
return HasColumnMatches(column_pattern)
class HasRow(BaseMatcher):
def __init__(self, row):
self._row = row
def _matches(self, df):
return self._row in df.index
def describe_to(self, description):
description.append_text(u'Dataframe doesn\'t have row [%s]'.format(self._row))
def has_row(row):
return HasRow(row)
| # -*- coding: utf-8 -*-
import re
from hamcrest.core.base_matcher import BaseMatcher
class HasColumn(BaseMatcher):
    """Matches a DataFrame that contains the given column name."""

    def __init__(self, column):
        self._column = column

    def _matches(self, df):
        return self._column in df.columns

    def describe_to(self, description):
        # Typo fix: "colum" -> "column" in the mismatch description.
        description.append_text(u'Dataframe doesn\'t have column [{0}]'.format(self._column))

def has_column(column):
    """Factory for :class:`HasColumn`."""
    return HasColumn(column)
class HasColumnMatches(BaseMatcher):
    """Matches a DataFrame with at least one column name matching the regex."""

    def __init__(self, column_pattern):
        self._column_pattern = re.compile(column_pattern)

    def _matches(self, df):
        # Iterate the column labels directly so duplicated columns are handled.
        return len(list(filter(self._column_pattern.match, df.columns.values))) > 0

    def describe_to(self, description):
        # Typo fix: "colum" -> "column" in the mismatch description.
        description.append_text(u'Dataframe doesn\'t have column matches [{0}]'.format(self._column_pattern))

def has_column_matches(column_pattern):
    """Factory for :class:`HasColumnMatches`."""
    return HasColumnMatches(column_pattern)
class HasRow(BaseMatcher):
    """Matches a DataFrame whose index contains the given row label."""

    def __init__(self, row):
        self._row = row

    def _matches(self, df):
        return self._row in df.index

    def describe_to(self, description):
        # Bug fix: the original used a '%s' placeholder with str.format(), so
        # the row label was never interpolated into the message.
        description.append_text(u'Dataframe doesn\'t have row [{0}]'.format(self._row))

def has_row(row):
    """Factory for :class:`HasRow`."""
    return HasRow(row)
| Fix HasColumn matcher for dataframe with duplicated columns | Fix HasColumn matcher for dataframe with duplicated columns
| Python | mit | sinall/ShiPanE-Python-SDK,sinall/ShiPanE-Python-SDK | ---
+++
@@ -25,7 +25,7 @@
self._column_pattern = re.compile(column_pattern)
def _matches(self, df):
- return df.filter(regex=self._column_pattern).columns.size > 0
+ return len(list(filter(self._column_pattern.match, df.columns.values))) > 0
def describe_to(self, description):
description.append_text(u'Dataframe doesn\'t have colum matches [{0}]'.format(self._column_pattern)) |
7d6800c33a525355714e355ec87e989372c293d7 | main.py | main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import xmpp
#from google.appengine.api import urlfetch
import urllib2,md5,logging,csv
############## webapp Models ###################
class MainPage(webapp.RequestHandler):
def get(self):
url = 'http://www.twse.com.tw/ch/trading/exchange/STOCK_DAY_AVG/STOCK_DAY_AVG2.php?STK_NO=2363&myear=2010&mmon=06&type=csv'
cc = urllib2.urlopen(url)
csv_read = csv.reader(cc)
self.response.out.write('Go Ri Stock')
#csv_read.next
for i in csv_read:
print i
self.response.out.write('<br>%s' % type(csv_read))
############## main Models ###################
def main():
""" Start up. """
application = webapp.WSGIApplication(
[
('/', MainPage)
],debug=True)
run_wsgi_app(application)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.api import xmpp
#from google.appengine.api import urlfetch
import urllib2,md5,logging,csv,re,math
def ckinv(oo):
    """Return True if the first cell of CSV row *oo* looks like a YY/MM/DD date.

    Empty rows and non-string first cells count as "not a date" rather than
    raising, which preserves the original bare-except behaviour with narrow,
    explicit exception handling.
    """
    pattern = re.compile(r"[0-9]{2}/[0-9]{2}/[0-9]{2}")
    try:
        # search() returns None when there is no match — no need for group().
        return pattern.search(oo[0]) is not None
    except (IndexError, TypeError):
        # Empty row, or first cell is not a string.
        return False
def covstr(s):
    """Convert *s* to a number: int when possible, float otherwise.

    A ValueError from float() propagates when *s* is neither.
    """
    try:
        return int(s)
    except ValueError:
        return float(s)
############## webapp Models ###################
class MainPage(webapp.RequestHandler):
    """Fetch one month of TWSE average-price CSV data and sum the values."""

    def get(self):
        # Stock 2363, June 2010, CSV export from the Taiwan Stock Exchange.
        url = 'http://www.twse.com.tw/ch/trading/exchange/STOCK_DAY_AVG/STOCK_DAY_AVG2.php?STK_NO=2363&myear=2010&mmon=06&type=csv'
        cc = urllib2.urlopen(url)
        csv_read = csv.reader(cc)
        self.response.out.write('Go Ri Stock')
        #csv_read.next
        getr = []  # numeric values collected from date rows
        for i in csv_read:
            print i
            # Only rows whose first cell is a date carry price data.
            if ckinv(i):
                getr.append(covstr(i[1]))
        print getr
        print math.fsum(getr)
############## main Models ###################
def main():
    """ Start up. """
    # Single route: everything is handled by MainPage.
    application = webapp.WSGIApplication(
        [
            ('/', MainPage)
        ],debug=True)
    run_wsgi_app(application)

if __name__ == '__main__':
    main()
| Add check the value is date or not. | Add check the value is date or not.
| Python | mit | toomore/goristock | ---
+++
@@ -7,7 +7,25 @@
#from google.appengine.api import urlfetch
-import urllib2,md5,logging,csv
+import urllib2,md5,logging,csv,re,math
+
+def ckinv(oo):
+ """ check the value is date or not """
+ pattern = re.compile(r"[0-9]{2}/[0-9]{2}/[0-9]{2}")
+ b = re.search(pattern, oo[0])
+ try:
+ b.group()
+ return True
+ except:
+ return False
+
+def covstr(s):
+ """ convert string to int or float. """
+ try:
+ ret = int(s)
+ except ValueError:
+ ret = float(s)
+ return ret
############## webapp Models ###################
class MainPage(webapp.RequestHandler):
@@ -19,9 +37,13 @@
self.response.out.write('Go Ri Stock')
#csv_read.next
+ getr = []
for i in csv_read:
print i
- self.response.out.write('<br>%s' % type(csv_read))
+ if ckinv(i):
+ getr.append(covstr(i[1]))
+ print getr
+ print math.fsum(getr)
############## main Models ###################
def main(): |
f85425a2c74cf15555bbed233287ddbd7ab8b24e | flexget/ui/plugins/log/log.py | flexget/ui/plugins/log/log.py | from __future__ import unicode_literals, division, absolute_import
from flexget.ui import register_plugin, Blueprint, register_menu
log = Blueprint('log', __name__)
register_plugin(log)
log.register_angular_route(
'',
url=log.url_prefix,
template_url='index.html',
controller='LogViewCtrl'
)
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
log.register_js('angular-oboe', 'js/libs/angular-oboe.js')
log.register_js('oboe-browser', 'js/libs/oboe-browser.js')
register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o')
| from __future__ import unicode_literals, division, absolute_import
from flexget.ui import register_plugin, Blueprint, register_menu

# Blueprint for the log viewer page of the web UI.
log = Blueprint('log', __name__)
register_plugin(log)

# The blueprint's root route renders the Angular log view.
log.register_angular_route(
    '',
    url=log.url_prefix,
    template_url='index.html',
    controller='LogViewCtrl'
)

# Static assets for the page (oboe JS library bundled under libs/oboe).
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
log.register_js('angular-oboe', 'libs/oboe/js/angular-oboe.js')
log.register_js('oboe-browser', 'libs/oboe/js/oboe-browser.js')

register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o')
| Rename libs to keep with standard | Rename libs to keep with standard
| Python | mit | LynxyssCZ/Flexget,qvazzler/Flexget,tobinjt/Flexget,malkavi/Flexget,ZefQ/Flexget,qk4l/Flexget,Flexget/Flexget,tsnoam/Flexget,ianstalk/Flexget,grrr2/Flexget,jacobmetrick/Flexget,qvazzler/Flexget,crawln45/Flexget,poulpito/Flexget,JorisDeRieck/Flexget,gazpachoking/Flexget,tobinjt/Flexget,oxc/Flexget,dsemi/Flexget,jawilson/Flexget,malkavi/Flexget,tsnoam/Flexget,jacobmetrick/Flexget,OmgOhnoes/Flexget,tarzasai/Flexget,jacobmetrick/Flexget,dsemi/Flexget,Danfocus/Flexget,antivirtel/Flexget,Danfocus/Flexget,drwyrm/Flexget,tobinjt/Flexget,JorisDeRieck/Flexget,drwyrm/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,lildadou/Flexget,tarzasai/Flexget,offbyone/Flexget,grrr2/Flexget,qvazzler/Flexget,Pretagonist/Flexget,crawln45/Flexget,Pretagonist/Flexget,grrr2/Flexget,tsnoam/Flexget,ZefQ/Flexget,dsemi/Flexget,poulpito/Flexget,crawln45/Flexget,gazpachoking/Flexget,sean797/Flexget,Pretagonist/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,Flexget/Flexget,OmgOhnoes/Flexget,Flexget/Flexget,Flexget/Flexget,sean797/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,tarzasai/Flexget,malkavi/Flexget,jawilson/Flexget,Danfocus/Flexget,antivirtel/Flexget,ianstalk/Flexget,OmgOhnoes/Flexget,jawilson/Flexget,oxc/Flexget,crawln45/Flexget,cvium/Flexget,qk4l/Flexget,sean797/Flexget,Danfocus/Flexget,lildadou/Flexget,LynxyssCZ/Flexget,ianstalk/Flexget,poulpito/Flexget,offbyone/Flexget,ZefQ/Flexget,tobinjt/Flexget,qk4l/Flexget,cvium/Flexget,cvium/Flexget,drwyrm/Flexget,offbyone/Flexget,malkavi/Flexget,oxc/Flexget | ---
+++
@@ -15,8 +15,8 @@
log.register_css('log', 'css/log.css', order=99)
log.register_js('log', 'js/log.js')
-log.register_js('angular-oboe', 'js/libs/angular-oboe.js')
-log.register_js('oboe-browser', 'js/libs/oboe-browser.js')
+log.register_js('angular-oboe', 'libs/oboe/js/angular-oboe.js')
+log.register_js('oboe-browser', 'libs/oboe/js/oboe-browser.js')
register_menu(log.url_prefix, 'Log', icon='fa fa-file-text-o') |
8eb47d151868c8e5906af054749993cd46a73b2d | capstone/player/kerasplayer.py | capstone/player/kerasplayer.py | from keras.models import load_model
from . import Player
from ..utils import normalize_board, utility
class KerasPlayer(Player):
'''
Takes moves based on a Keras neural network model.
'''
name = 'Keras'
def __init__(self, filepath):
self.model = load_model(filepath)
def __str__(self):
return self.name
def __repr__(self):
return self.name
##########
# Player #
##########
def choose_move(self, state):
assert state.cur_player() == 0
best_action = None
best_value = -1000000
for action in state.legal_moves():
s = state.copy()
s = s.make_move(action)
value = self.model.predict(normalize_board(s.board), batch_size=1)
assert value >= -1.0 and value <= 1.0
if value > best_value:
best_action = action
best_value = value
return best_action
| from keras.models import load_model
from . import Player
from ..utils import normalize_board, utility
class KerasPlayer(Player):
    '''
    Takes moves based on a Keras neural network model.
    '''

    name = 'Keras'

    def __init__(self, filepath):
        # filepath: path to a saved Keras model (loadable by load_model()).
        self.model = load_model(filepath)

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.name

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        """Greedy one-ply search: return the legal move whose resulting
        position the model values highest. Only valid for player 0."""
        assert game.cur_player() == 0
        best_move = None
        best_value = -1000000  # sentinel below any model output (range [-1, 1])
        for move in game.legal_moves():
            next_game = game.copy().make_move(move)
            # NOTE(review): predict() returns an array; the comparisons below
            # rely on implicit scalar coercion — confirm output shape is (1, 1).
            value = self.model.predict(normalize_board(next_game.board), batch_size=1)
            assert value >= -1.0 and value <= 1.0
            if value > best_value:
                best_move = move
                best_value = value
        return best_move
| Rename state to game in KerasPlayer | Rename state to game in KerasPlayer
| Python | mit | davidrobles/mlnd-capstone-code | ---
+++
@@ -23,16 +23,15 @@
# Player #
##########
- def choose_move(self, state):
- assert state.cur_player() == 0
- best_action = None
+ def choose_move(self, game):
+ assert game.cur_player() == 0
+ best_move = None
best_value = -1000000
- for action in state.legal_moves():
- s = state.copy()
- s = s.make_move(action)
- value = self.model.predict(normalize_board(s.board), batch_size=1)
+ for move in game.legal_moves():
+ next_game = game.copy().make_move(move)
+ value = self.model.predict(normalize_board(next_game.board), batch_size=1)
assert value >= -1.0 and value <= 1.0
if value > best_value:
- best_action = action
+ best_move = move
best_value = value
- return best_action
+ return best_move |
9b032e06156aa011e5d78d0d9ea297420cb33c2e | form_designer/contrib/cms_plugins/form_designer_form/migrations/0001_initial.py | form_designer/contrib/cms_plugins/form_designer_form/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import cms
from django.db import migrations, models
from pkg_resources import parse_version as V
# Django CMS 3.3.1 is oldest release where the change affects.
# Refs https://github.com/divio/django-cms/commit/871a164
if V(cms.__version__) >= V('3.3.1'):
field_kwargs = {'related_name': 'form_designer_form_cmsformdefinition'}
else:
field_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('cms', '0001_initial'),
('form_designer', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CMSFormDefinition',
fields=[
('cmsplugin_ptr',
models.OneToOneField(
serialize=False,
auto_created=True,
primary_key=True,
to='cms.CMSPlugin',
parent_link=True,
**field_kwargs)),
('form_definition',
models.ForeignKey(
verbose_name='form',
to='form_designer.FormDefinition')),
],
options={
'abstract': False,
},
bases=(
'cms.cmsplugin',
),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import cms
from django.db import migrations, models
from pkg_resources import parse_version as V
# Django CMS 3.3.1 is oldest release where the change affects.
# Refs https://github.com/divio/django-cms/commit/871a164
if V(cms.__version__) >= V('3.3.1'):
field_kwargs = {'related_name': 'form_designer_form_cmsformdefinition'}
else:
field_kwargs = {}
class Migration(migrations.Migration):
    # Creates the CMSFormDefinition plugin table linking a CMS plugin
    # instance to a form_designer FormDefinition.

    dependencies = [
        ('cms', '0001_initial'),
        ('form_designer', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='CMSFormDefinition',
            fields=[
                # Parent link to the base CMSPlugin row; extra kwargs depend on
                # the installed django-cms version (field_kwargs, module level).
                ('cmsplugin_ptr',
                 models.OneToOneField(
                     serialize=False,
                     auto_created=True,
                     primary_key=True,
                     to='cms.CMSPlugin',
                     parent_link=True,
                     on_delete=models.CASCADE,
                     **field_kwargs)),
                ('form_definition',
                 models.ForeignKey(
                     verbose_name='form',
                     to='form_designer.FormDefinition',
                     on_delete=models.CASCADE)),
            ],
            options={
                'abstract': False,
            },
            bases=(
                'cms.cmsplugin',
            ),
        ),
    ]
| Add on_delete args to CMS plugin migration for Django 2 support | Add on_delete args to CMS plugin migration for Django 2 support
| Python | bsd-3-clause | kcsry/django-form-designer,kcsry/django-form-designer,andersinno/django-form-designer,andersinno/django-form-designer-ai,andersinno/django-form-designer-ai,andersinno/django-form-designer | ---
+++
@@ -30,11 +30,13 @@
primary_key=True,
to='cms.CMSPlugin',
parent_link=True,
+ on_delete=models.CASCADE,
**field_kwargs)),
('form_definition',
models.ForeignKey(
verbose_name='form',
- to='form_designer.FormDefinition')),
+ to='form_designer.FormDefinition',
+ on_delete=models.CASCADE)),
],
options={
'abstract': False, |
d6787523cb8b58c51fe32d4524389a500e3b7b21 | foliant/cli.py | foliant/cli.py | """Foliant: Markdown to PDF, Docx, and LaTeX generator powered by Pandoc.
Usage:
foliant (build | make) <target> [--path=<project-path>]
foliant (upload | up) <document> [--secret=<client_secret*.json>]
foliant (-h | --help)
foliant --version
Options:
-h --help Show this screen.
-v --version Show version.
-p --path=<project-path> Path to your project [default: .].
-s --secret=<client_secret*.json> Path to Google app's client secret file.
"""
from docopt import docopt
import colorama
from colorama import Fore
from foliant import builder, uploader
from foliant import __version__ as foliant_version
def main():
"""Handles command-line params and runs the respective core function."""
colorama.init(autoreset=True)
args = docopt(__doc__, version="Foliant %s (Python)" % foliant_version)
if args["build"] or args["make"]:
result = builder.build(args["<target>"], args["--path"])
elif args["upload"] or args["up"]:
result = uploader.upload(args["<document>"])
print("---")
print(Fore.GREEN + "Result: %s" % result)
colorama.deinit()
| """Foliant: Markdown to PDF, Docx, and LaTeX generator powered by Pandoc.
Usage:
foliant (build | make) <target> [--path=<project-path>]
foliant (upload | up) <document> [--secret=<client_secret*.json>]
foliant (-h | --help)
foliant --version
Options:
-h --help Show this screen.
-v --version Show version.
-p --path=<project-path> Path to your project [default: .].
-s --secret=<client_secret*.json> Path to Google app's client secret file.
"""
from docopt import docopt
import colorama
from colorama import Fore
from . import builder, uploader
from . import __version__ as foliant_version
def main():
    """Handles command-line params and runs the respective core function."""
    colorama.init(autoreset=True)
    # docopt validates argv against the usage string in the module docstring
    # and exits on mismatch.
    args = docopt(__doc__, version="Foliant %s (Python)" % foliant_version)
    if args["build"] or args["make"]:
        result = builder.build(args["<target>"], args["--path"])
    elif args["upload"] or args["up"]:
        result = uploader.upload(args["<document>"])
    # NOTE(review): `result` is only bound if one of the branches ran; the
    # usage pattern should guarantee that — confirm docopt behaviour.
    print("---")
    print(Fore.GREEN + "Result: %s" % result)
    colorama.deinit()
| Replace hardcoded package name with ".". | CLI: Replace hardcoded package name with ".".
| Python | mit | foliant-docs/foliant | ---
+++
@@ -17,8 +17,8 @@
import colorama
from colorama import Fore
-from foliant import builder, uploader
-from foliant import __version__ as foliant_version
+from . import builder, uploader
+from . import __version__ as foliant_version
def main(): |
dd02861cd9fb5b06d42f7a6413b371c52c167ba8 | gcmconsumer.py | gcmconsumer.py | import sys
import fedmsg.consumers
import yaml
class GCMConsumer(fedmsg.consumers.FedmsgConsumer):
topic = 'org.fedoraproject.prod.*'
config_key = 'gcmconsumer'
def __init__(self, *args, **kw):
super(GCMConsumer, self).__init__(*args, **kw)
def get_registration_ids_for_topic(self, topic):
'''Get the Android/GCM registration IDs for all users who subscribe to a
particular topic. We load the config each time so that we can change
it on the fly to add users without having to miss any messages.'''
config = yaml.load(file('/home/ricky/devel/fedora/fedmsg-gcm-demo/config.yaml', 'r'))
return filter(None, [user['registration_id'] if topic in user['topics'] else None for user in config['users']])
def consume(self, msg):
users = self.get_registration_ids_for_topic(msg['topic'])
if users:
print "* SENDING MESSAGE TO USER(S)"
print msg['topic']
| import fedmsg.consumers
import json
import requests
import sys
import yaml
class GCMConsumer(fedmsg.consumers.FedmsgConsumer):
    """Fedmsg consumer that forwards matching messages to Android devices via GCM."""

    # Subscribe to every production fedmsg topic.
    topic = 'org.fedoraproject.prod.*'
    config_key = 'gcmconsumer'

    def __init__(self, *args, **kw):
        # NOTE(review): hard-coded, user-specific config path — should be
        # configurable before this leaves demo status.
        self.config_file = '/home/ricky/devel/fedora/fedmsg-gcm-demo/config.yaml'
        self.config = yaml.load(file(self.config_file, 'r'))
        super(GCMConsumer, self).__init__(*args, **kw)

    def _get_registration_ids_for_topic(self, topic):
        '''Get the Android/GCM registration IDs for all users who subscribe to a
        particular topic. We load the config each time so that we can change
        it on the fly to add users without having to miss any messages.'''
        self.config = yaml.load(file(self.config_file, 'r'))
        return filter(None, [user['registration_id'] if topic in user['topics'] else None for user in self.config['users']])

    def _send_gcm(self, data, ids):
        '''Send a message to GCM for specific registration IDs.'''
        headers = {
            'Authorization': 'key=%s' % self.config['api_key'],
            'content-type': 'application/json'
        }
        body = {
            'registration_ids': ids,
            'data': data
        }
        request = requests.post(
            self.config['post_url'],
            data=json.dumps(body),
            headers=headers)
        return request

    def consume(self, msg):
        # One GCM POST per subscribed user; responses are only printed (demo code).
        users = self._get_registration_ids_for_topic(msg['topic'])
        for user in users:
            print "* SENDING MESSAGE TO USER: %s" % user
            response = self._send_gcm(
                {
                    'topic': msg['topic']
                },
                [user])
            print response.text
            print response.status_code
            print response

        print msg['topic']
| Handle actually sending out notifications | Handle actually sending out notifications
| Python | apache-2.0 | fedora-infra/fedmsg-gcm-demo | ---
+++
@@ -1,5 +1,7 @@
+import fedmsg.consumers
+import json
+import requests
import sys
-import fedmsg.consumers
import yaml
class GCMConsumer(fedmsg.consumers.FedmsgConsumer):
@@ -7,17 +9,44 @@
config_key = 'gcmconsumer'
def __init__(self, *args, **kw):
+ self.config_file = '/home/ricky/devel/fedora/fedmsg-gcm-demo/config.yaml'
+ self.config = yaml.load(file(self.config_file, 'r'))
super(GCMConsumer, self).__init__(*args, **kw)
- def get_registration_ids_for_topic(self, topic):
+ def _get_registration_ids_for_topic(self, topic):
'''Get the Android/GCM registration IDs for all users who subscribe to a
particular topic. We load the config each time so that we can change
it on the fly to add users without having to miss any messages.'''
- config = yaml.load(file('/home/ricky/devel/fedora/fedmsg-gcm-demo/config.yaml', 'r'))
- return filter(None, [user['registration_id'] if topic in user['topics'] else None for user in config['users']])
+ self.config = yaml.load(file(self.config_file, 'r'))
+ return filter(None, [user['registration_id'] if topic in user['topics'] else None for user in self.config['users']])
+
+ def _send_gcm(self, data, ids):
+ '''Send a message to GCM for specific registration IDs.'''
+ headers = {
+ 'Authorization': 'key=%s' % self.config['api_key'],
+ 'content-type': 'application/json'
+ }
+ body = {
+ 'registration_ids': ids,
+ 'data': data
+ }
+ request = requests.post(
+ self.config['post_url'],
+ data=json.dumps(body),
+ headers=headers)
+ return request
def consume(self, msg):
- users = self.get_registration_ids_for_topic(msg['topic'])
- if users:
- print "* SENDING MESSAGE TO USER(S)"
+ users = self._get_registration_ids_for_topic(msg['topic'])
+ for user in users:
+ print "* SENDING MESSAGE TO USER: %s" % user
+ response = self._send_gcm(
+ {
+ 'topic': msg['topic']
+ },
+ [user])
+ print response.text
+ print response.status_code
+ print response
+
print msg['topic'] |
81e7d7d45e71f96d85468737708a31aef939091b | grip/default_config.py | grip/default_config.py | """\
Default Configuration
Do NOT change the values here for risk of accidentally committing them.
Override them using command-line arguments or with a local_config.py instead.
"""
HOST = 'localhost'
PORT = 5000
DEBUG = True
DEBUG_GRIP = False
STYLE_URLS = []
STYLE_URL_SOURCE = 'https://github.com/joeyespo/grip'
STYLE_URL_RE = '<link.+href=[\'"]?([^\'" >]+)[\'"]?.+media=[\'"]?screen[\'"]?.+rel=[\'"]?stylesheet[\'"]?.+/>'
| """\
Default Configuration
Do NOT change the values here for risk of accidentally committing them.
Override them using command-line arguments or with a local_config.py instead.
"""
HOST = 'localhost'
PORT = 5000
DEBUG = True
DEBUG_GRIP = False
# Extra stylesheet URLs to inject, in addition to those scraped from GitHub.
STYLE_URLS = []
# Page scraped for GitHub's stylesheet <link> tags.
STYLE_URL_SOURCE = 'https://github.com/joeyespo/grip'
# Matches stylesheet <link> tags with media="screen" or media="all".
STYLE_URL_RE = '<link.+href=[\'"]?([^\'" >]+)[\'"]?.+media=[\'"]?(?:screen|all)[\'"]?.+rel=[\'"]?stylesheet[\'"]?.+/>'
| Update GitHub CSS file regex. | Update GitHub CSS file regex.
| Python | mit | mgoddard-pivotal/grip,mgoddard-pivotal/grip,ssundarraj/grip,joeyespo/grip,ssundarraj/grip,joeyespo/grip,jbarreras/grip,jbarreras/grip | ---
+++
@@ -12,4 +12,4 @@
DEBUG_GRIP = False
STYLE_URLS = []
STYLE_URL_SOURCE = 'https://github.com/joeyespo/grip'
-STYLE_URL_RE = '<link.+href=[\'"]?([^\'" >]+)[\'"]?.+media=[\'"]?screen[\'"]?.+rel=[\'"]?stylesheet[\'"]?.+/>'
+STYLE_URL_RE = '<link.+href=[\'"]?([^\'" >]+)[\'"]?.+media=[\'"]?(?:screen|all)[\'"]?.+rel=[\'"]?stylesheet[\'"]?.+/>' |
019c91a8cd32fe1a4034837ed75dcc849d9033e5 | format_json.py | format_json.py | #! /usr/bin/env python3
import sys
import json
for filepath in sys.argv[1:]:
with open(filepath) as f:
try:
oyster = json.load(f)
except ValueError:
sys.stderr.write("In file: {}\n".format(filepath))
raise
with open(filepath, 'w') as f:
json.dump(oyster, f, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
f.write('\n') # add a trailing newline.
| #! /usr/bin/env python3
import sys
import json
import argparse
def format_json(fp):
    """Pretty-print the JSON document in *fp* back into the same file.

    *fp* must be open in a read+write mode (e.g. 'r+'); it is closed when
    done. A ValueError from invalid JSON is re-raised after writing the
    offending file name to stderr.
    """
    try:
        data = json.load(fp)
    except ValueError:
        sys.stderr.write("In file: {}\n".format(fp.name))
        raise
    # Jump back to the beginning of the file before overwriting it.
    fp.seek(0)
    json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
    fp.write('\n')  # add a trailing newline.
    # Bug fix: drop any leftover bytes when the reformatted document is
    # shorter than the original, which previously left trailing garbage.
    fp.truncate()
    fp.close()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Format JSON files in place.'
    )
    # Each path is opened in 'r+' so format_json can rewrite it in place.
    parser.add_argument(
        'files',
        type=argparse.FileType('r+'),
        help='JSON filepaths',
        nargs='+'
    )
    args = parser.parse_args()
    for json_file in args.files:
        format_json(json_file)
| Make this a proper argparse script. | Make this a proper argparse script.
| Python | mit | nbeaver/cmd-oysters,nbeaver/cmd-oysters | ---
+++
@@ -2,14 +2,30 @@
import sys
import json
+import argparse
-for filepath in sys.argv[1:]:
- with open(filepath) as f:
- try:
- oyster = json.load(f)
- except ValueError:
- sys.stderr.write("In file: {}\n".format(filepath))
- raise
- with open(filepath, 'w') as f:
- json.dump(oyster, f, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
- f.write('\n') # add a trailing newline.
+def format_json(fp):
+ try:
+ data = json.load(fp)
+ except ValueError:
+ sys.stderr.write("In file: {}\n".format(fp.name))
+ raise
+ # Jump back to the beginning of the file before overwriting it.
+ fp.seek(0)
+ json.dump(data, fp, ensure_ascii=False, indent=4, separators=(',', ': '), sort_keys=True)
+ fp.write('\n') # add a trailing newline.
+ fp.close()
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description='Format JSON files in place.'
+ )
+ parser.add_argument(
+ 'files',
+ type=argparse.FileType('r+'),
+ help='JSON filepaths',
+ nargs='+'
+ )
+ args = parser.parse_args()
+ for json_file in args.files:
+ format_json(json_file) |
2a242bb6984fae5e32f117fa5ae68118621f3c95 | pycroft/model/alembic/versions/fb8d553a7268_add_account_pattern.py | pycroft/model/alembic/versions/fb8d553a7268_add_account_pattern.py | """add account_pattern
Revision ID: fb8d553a7268
Revises: 28e56bf6f62c
Create Date: 2021-04-26 22:16:41.772282
"""
from alembic import op
import sqlalchemy as sa
import pycroft
# revision identifiers, used by Alembic.
revision = 'fb8d553a7268'
down_revision = '0b69e80a9388'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_pattern',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('pattern', sa.String(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['account_id'], ['account.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('account_pattern')
# ### end Alembic commands ###
| """add account_pattern
Revision ID: fb8d553a7268
Revises: 28e56bf6f62c
Create Date: 2021-04-26 22:16:41.772282
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fb8d553a7268'
down_revision = '0b69e80a9388'
branch_labels = None
depends_on = None
def upgrade():
    """Create the account_pattern table (pattern strings attached to accounts)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('account_pattern',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('pattern', sa.String(), nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        # Rows are removed automatically when the referenced account is deleted.
        sa.ForeignKeyConstraint(['account_id'], ['account.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the account_pattern table (reverse of upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('account_pattern')
    # ### end Alembic commands ###
| Remove unnecessary pycroft import in migration | Remove unnecessary pycroft import in migration
| Python | apache-2.0 | agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft | ---
+++
@@ -7,8 +7,6 @@
"""
from alembic import op
import sqlalchemy as sa
-import pycroft
-
# revision identifiers, used by Alembic.
revision = 'fb8d553a7268' |
569e180b99be2ec67f360a7081bbd54020d78a25 | grum/models.py | grum/models.py | import bcrypt
from grum import db
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(64), unique=True)
password = db.Column(db.String(128))
display_name = db.Column(db.String(128))
def __init__(self, username=None, password=None):
if username:
self.username = username
if password:
self.set_password(password)
def set_password(self, plaintext_password):
self.password = bcrypt.hashpw(plaintext_password.encode('utf-8'), bcrypt.gensalt())
def validate_password(self, plaintext_password):
hashed = bcrypt.hashpw(plaintext_password.encode('utf-8'), bytes(self.password.encode('utf-8')))
return hashed == bytes(self.password.encode('utf-8'))
class EmailAccount(db.Model):
address = db.Column(db.String(128), primary_key=True)
owner_id = db.Column(db.Integer, db.ForeignKey('user.id'))
mg_api = db.Column(db.String(64)) | import bcrypt
from grum import db
class User(db.Model):
    """Application user with a bcrypt-hashed password."""

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), unique=True)
    # Stores the bcrypt hash, never the plaintext.
    password = db.Column(db.String(128))
    display_name = db.Column(db.String(128))

    def __init__(self, username=None, display_name=None, password=None):
        if username:
            self.username = username
        if display_name:
            self.display_name = display_name
        if password:
            # Hash immediately; plaintext is never stored on the instance.
            self.set_password(password)

    def set_password(self, plaintext_password):
        # gensalt() embeds the salt in the resulting hash string.
        self.password = bcrypt.hashpw(plaintext_password.encode('utf-8'), bcrypt.gensalt())

    def validate_password(self, plaintext_password):
        # Re-hash the candidate using the stored hash as the salt, then compare.
        # NOTE(review): plain == is not a constant-time comparison; consider
        # bcrypt.checkpw, which hashes and compares safely.
        hashed = bcrypt.hashpw(plaintext_password.encode('utf-8'), bytes(self.password.encode('utf-8')))
        return hashed == bytes(self.password.encode('utf-8'))
class EmailAccount(db.Model):
address = db.Column(db.String(128), primary_key=True)
owner_id = db.Column(db.Integer, db.ForeignKey('user.id'))
mg_api = db.Column(db.String(64)) | Add display name to the constructor for User | Add display name to the constructor for User
| Python | mit | Grum-Hackdee/grum-web,Grum-Hackdee/grum-web,Grum-Hackdee/grum-web,Grum-Hackdee/grum-web | ---
+++
@@ -8,9 +8,11 @@
password = db.Column(db.String(128))
display_name = db.Column(db.String(128))
- def __init__(self, username=None, password=None):
+ def __init__(self, username=None, display_name=None, password=None):
if username:
self.username = username
+ if display_name:
+ self.display_name = display_name
if password:
self.set_password(password)
|
94861438189537b88deaf8d04cc9942192038d8c | user_messages/views.py | user_messages/views.py | from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
| from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
if request.user == thread.to_user:
thread.to_user_unread = False
else:
thread.from_user_unread = False
thread.save()
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
| Update the read status of a thread when it's viewed | Update the read status of a thread when it's viewed
| Python | mit | eldarion/user_messages,eldarion/user_messages,pinax/pinax-messages,arthur-wsw/pinax-messages,pinax/pinax-messages,arthur-wsw/pinax-messages | ---
+++
@@ -16,6 +16,9 @@
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
+ if request.user == thread.to_user:
+ thread.to_user_unread = False
+ else:
+ thread.from_user_unread = False
+ thread.save()
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
-
- |
7897423ea3c8e418b405ce2d09318ad9b1526a22 | tests/test_urls.py | tests/test_urls.py | from django.conf.urls import patterns, url
from djproxy.urls import generate_routes
from test_views import LocalProxy, index
urlpatterns = patterns(
'',
url(r'^some/content/.*$', index, name='index'),
url(r'^local_proxy/(?P<url>.*)$', LocalProxy.as_view(), name='proxy')
) + generate_routes({
'service_one': {
'base_url': 'https://yahoo.com/',
'prefix': 'yahoo/'
},
'service_two': {
'base_url': 'https://google.com/',
'prefix': 'google/'
}
})
| from django.conf.urls import patterns, url
from djproxy.urls import generate_routes
from test_views import LocalProxy, index
urlpatterns = patterns(
'',
url(r'^some/content/.*$', index, name='index'),
url(r'^local_proxy/(?P<url>.*)$', LocalProxy.as_view(), name='proxy')
) + generate_routes({
'service_one': {
'base_url': 'https://yahoo.com/',
'prefix': 'yahoo/'
},
'service_two': {
'base_url': 'https://www.google.com/',
'prefix': 'google/'
}
})
| Add www to test google config | Add www to test google config
This makes it work right for google.com sub pages when testing via a browser.
| Python | mit | thomasw/djproxy | ---
+++
@@ -14,7 +14,7 @@
'prefix': 'yahoo/'
},
'service_two': {
- 'base_url': 'https://google.com/',
+ 'base_url': 'https://www.google.com/',
'prefix': 'google/'
}
}) |
64f9ef6fcc71ef09e113161711369fe4d9781a18 | shorpypaper.py | shorpypaper.py | #!/usr/bin/python
from pyquery import PyQuery as pq
import requests
import subprocess
APPLESCRIPT = """/usr/bin/osascript<<END
tell application "Finder"
set desktop picture to POSIX file "%s"
end tell
END"""
def main():
# Load main site.
root = 'http://www.shorpy.com'
r = requests.get(root)
j = pq(r.content)
# Load first photo.
first_photo = root + j('div.node div.content a').eq(1).attr('href')
r = requests.get(first_photo)
j = pq(r.content)
image = j('img').eq(0).attr('src')
with open('/tmp/dailyshorpy.jpg', 'wb') as handle:
# To reset the cached dailyshorpy.jpg.
subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Frog.jpg', shell=True)
request = requests.get(image, stream=True)
for block in request.iter_content(1024):
if not block:
break
handle.write(block)
subprocess.Popen(APPLESCRIPT % '/tmp/dailyshorpy.jpg', shell=True)
if __name__ == '__main__':
main()
| #!/usr/bin/python
from pyquery import PyQuery as pq
import requests
import subprocess
APPLESCRIPT = """/usr/bin/osascript<<END
tell application "Finder"
set desktop picture to POSIX file "%s"
end tell
END"""
def main():
# Load main site.
root = 'http://www.shorpy.com'
r = requests.get(root)
j = pq(r.content)
# Load first photo.
first_photo = root + j('div.node div.content a').eq(1).attr('href')
r = requests.get(first_photo)
j = pq(r.content)
image = j('img').eq(0).attr('src')
with open('/tmp/dailyshorpy.jpg', 'wb') as handle:
# To reset the cached dailyshorpy.jpg.
subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Solid Colors/Solid Gray Light.png', shell=True)
request = requests.get(image, stream=True)
for block in request.iter_content(1024):
if not block:
break
handle.write(block)
subprocess.Popen(APPLESCRIPT % '/tmp/dailyshorpy.jpg', shell=True)
if __name__ == '__main__':
main()
| Use a grey solid instead of the damn frog. | Use a grey solid instead of the damn frog.
| Python | mit | nicksergeant/shorpypaper | ---
+++
@@ -25,7 +25,7 @@
with open('/tmp/dailyshorpy.jpg', 'wb') as handle:
# To reset the cached dailyshorpy.jpg.
- subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Frog.jpg', shell=True)
+ subprocess.Popen(APPLESCRIPT % '/Library/Desktop Pictures/Solid Colors/Solid Gray Light.png', shell=True)
request = requests.get(image, stream=True)
for block in request.iter_content(1024): |
c09f586bfa36f4ff66ae3b8a82fd7b4eeb8ea5d7 | windpowerlib/tools.py | windpowerlib/tools.py | """
The ``tools`` module contains a collection of functions used in the
windpowerlib.
"""
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"
| """
The ``tools`` module contains a collection of functions used in the
windpowerlib.
"""
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"
import collections
def select_closer_value(value_1, value_2, comp_value, corresp_1, corresp_2):
r"""
Selects the value with the smaller difference to a comparative value.
Additionally returns a corresponding value. This function is for example
used in :py:func:`~.modelchain.v_wind_hub` of the
:class:`~.modelchain.ModelChain` to choose the wind speed data that is
close to the hub height of the examined wind turbine. In this case
`value_1` and `value_2` are the heights of the corresponding wind speed
data sets `corresp_1` and `corresp_2`.
Parameters
----------
value_1 : float
First value of which the difference to `comp_value` will be
compared with the difference to `comp_value` of `value_2`.
value_2 : float
Second value for comparison.
comp_value : float
Comparative value.
corresp_1 : float
Corresponding value to `value_1`.
corresp_2 : float
Corresponding value to `value_2`.
Returns
-------
Tuple(float, float, string)
Value closer to comparing value as float, corresponding value as
float and a string for logging.debug.
"""
if (value_2 is not None and corresp_2 is not None):
if value_1 == comp_value:
closest_value = value_1
logging_string = '(at hub height).'
elif value_2 == comp_value:
closest_value = value_2
logging_string = '(2) (at hub height).'
elif abs(value_1 - comp_value) <= abs(value_2 - comp_value):
closest_value = value_1
logging_string = None
else:
closest_value = value_2
logging_string = None
else:
closest_value = value_1
if value_1 == comp_value:
logging_string = '(at hub height).'
# Select correponding value
if closest_value == value_1:
corresp_value = corresp_1
else:
corresp_value = corresp_2
# Store values in a named tuple
return_tuple = collections.namedtuple('selected_values',
['closest_value',
'corresp_value', 'logging_string'])
return return_tuple(closest_value, corresp_value, logging_string)
| Add function for selection of value closer to comparative value | Add function for selection of value closer to comparative value
| Python | mit | wind-python/windpowerlib | ---
+++
@@ -6,3 +6,66 @@
__copyright__ = "Copyright oemof developer group"
__license__ = "GPLv3"
+
+import collections
+
+
+def select_closer_value(value_1, value_2, comp_value, corresp_1, corresp_2):
+ r"""
+ Selects the value with the smaller difference to a comparative value.
+
+ Additionally returns a corresponding value. This function is for example
+ used in :py:func:`~.modelchain.v_wind_hub` of the
+ :class:`~.modelchain.ModelChain` to choose the wind speed data that is
+ close to the hub height of the examined wind turbine. In this case
+ `value_1` and `value_2` are the heights of the corresponding wind speed
+ data sets `corresp_1` and `corresp_2`.
+
+ Parameters
+ ----------
+ value_1 : float
+ First value of which the difference to `comp_value` will be
+ compared with the difference to `comp_value` of `value_2`.
+ value_2 : float
+ Second value for comparison.
+ comp_value : float
+ Comparative value.
+ corresp_1 : float
+ Corresponding value to `value_1`.
+ corresp_2 : float
+ Corresponding value to `value_2`.
+
+ Returns
+ -------
+ Tuple(float, float, string)
+ Value closer to comparing value as float, corresponding value as
+ float and a string for logging.debug.
+ """
+ if (value_2 is not None and corresp_2 is not None):
+ if value_1 == comp_value:
+ closest_value = value_1
+ logging_string = '(at hub height).'
+ elif value_2 == comp_value:
+ closest_value = value_2
+ logging_string = '(2) (at hub height).'
+ elif abs(value_1 - comp_value) <= abs(value_2 - comp_value):
+ closest_value = value_1
+ logging_string = None
+ else:
+ closest_value = value_2
+ logging_string = None
+ else:
+ closest_value = value_1
+ if value_1 == comp_value:
+ logging_string = '(at hub height).'
+
+ # Select correponding value
+ if closest_value == value_1:
+ corresp_value = corresp_1
+ else:
+ corresp_value = corresp_2
+ # Store values in a named tuple
+ return_tuple = collections.namedtuple('selected_values',
+ ['closest_value',
+ 'corresp_value', 'logging_string'])
+ return return_tuple(closest_value, corresp_value, logging_string) |
9f6d6509b1f3f4a5f3fd20919bcc465475fc1ce3 | app/composer.py | app/composer.py | import os
from app.configuration import get_value
from app.helper import php
def initialization():
checker_dir = get_value('checker-dir')
if not os.path.isfile(checker_dir+'bin/composer'):
download(checker_dir)
if not os.path.isfile(checker_dir+'bin/phpcs'):
php('bin/composer install')
def download(checker_dir):
php_bin = get_value('php')
if not os.path.exists(checker_dir+'bin'):
os.makedirs(checker_dir+'bin')
print('>>> Download composer')
os.system('curl -sS https://getcomposer.org/installer | '+php_bin+' -- --install-dir='+checker_dir+'bin --filename=composer')
def update():
php('bin/composer self-update')
php('bin/composer update')
def project_installation():
code = php('bin/composer install --optimize-autoloader')
if code != 0:
raise SystemExit('The composer install command for the project failed with the code '+str(code))
| import os
from app.configuration import get_value
from app.helper import php
def initialization():
checker_dir = get_value('checker-dir')
if not os.path.isfile(checker_dir+'bin/composer'):
download(checker_dir)
if not os.path.isfile(checker_dir+'bin/phpcs'):
composer('install')
def download(checker_dir):
php_bin = get_value('php')
if not os.path.exists(checker_dir+'bin'):
os.makedirs(checker_dir+'bin')
print('>>> Download composer')
os.system('curl -sS https://getcomposer.org/installer | '+php_bin+' -- --install-dir='+checker_dir+'bin --filename=composer')
def update():
composer('self-update')
composer('update')
def project_installation():
code = php('bin/composer install --optimize-autoloader')
if code != 0:
raise SystemExit('The composer install command for the project failed with the code '+str(code))
def composer(command):
base_dir = os.getcwd()
os.chdir(get_value('checker-dir'))
php('bin/composer '+command)
os.chdir(base_dir)
| Add execution path for internal update | Add execution path for internal update
| Python | mit | mi-schi/php-code-checker | ---
+++
@@ -9,7 +9,7 @@
if not os.path.isfile(checker_dir+'bin/composer'):
download(checker_dir)
if not os.path.isfile(checker_dir+'bin/phpcs'):
- php('bin/composer install')
+ composer('install')
def download(checker_dir):
@@ -21,11 +21,20 @@
def update():
- php('bin/composer self-update')
- php('bin/composer update')
+ composer('self-update')
+ composer('update')
def project_installation():
code = php('bin/composer install --optimize-autoloader')
if code != 0:
raise SystemExit('The composer install command for the project failed with the code '+str(code))
+
+
+def composer(command):
+ base_dir = os.getcwd()
+ os.chdir(get_value('checker-dir'))
+
+ php('bin/composer '+command)
+
+ os.chdir(base_dir) |
618245ab759cbf47fb53946b4c6149efdca7e1e0 | troposphere/sqs.py | troposphere/sqs.py | # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import integer
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Queue(AWSObject):
type = "AWS::SQS::Queue"
props = {
'DelaySeconds': (integer, False),
'MaximumMessageSize': (integer, False),
'MessageRetentionPeriod': (integer, False),
'QueueName': (basestring, False),
'ReceiveMessageWaitTimeSeconds': (integer, False),
'VisibilityTimeout': (integer, False),
}
class QueuePolicy(AWSObject):
type = "AWS::SQS::QueuePolicy"
props = {
'PolicyDocument': (policytypes, False),
'Queues': (list, True),
}
| # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import integer
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class RedrivePolicy(AWSProperty):
props = {
'deadLetterTargetArn': (basestring, False),
'maxReceiveCount': (integer, False),
}
class Queue(AWSObject):
type = "AWS::SQS::Queue"
props = {
'DelaySeconds': (integer, False),
'MaximumMessageSize': (integer, False),
'MessageRetentionPeriod': (integer, False),
'QueueName': (basestring, False),
'ReceiveMessageWaitTimeSeconds': (integer, False),
'RedrivePolicy': (RedrivePolicy, False),
'VisibilityTimeout': (integer, False),
}
class QueuePolicy(AWSObject):
type = "AWS::SQS::QueuePolicy"
props = {
'PolicyDocument': (policytypes, False),
'Queues': (list, True),
}
| Add SQS dead letter queue from CloudFormation release 2014-01-29 | Add SQS dead letter queue from CloudFormation release 2014-01-29
| Python | bsd-2-clause | cloudtools/troposphere,mhahn/troposphere,johnctitus/troposphere,ikben/troposphere,ikben/troposphere,garnaat/troposphere,pas256/troposphere,alonsodomin/troposphere,Yipit/troposphere,ptoraskar/troposphere,mannytoledo/troposphere,dmm92/troposphere,craigbruce/troposphere,jantman/troposphere,micahhausler/troposphere,jdc0589/troposphere,wangqiang8511/troposphere,cryptickp/troposphere,cloudtools/troposphere,unravelin/troposphere,7digital/troposphere,amosshapira/troposphere,LouTheBrew/troposphere,inetCatapult/troposphere,johnctitus/troposphere,pas256/troposphere,DualSpark/troposphere,horacio3/troposphere,horacio3/troposphere,Hons/troposphere,ccortezb/troposphere,alonsodomin/troposphere,yxd-hde/troposphere,xxxVxxx/troposphere,WeAreCloudar/troposphere,iblazevic/troposphere,dmm92/troposphere,kid/troposphere,7digital/troposphere,samcrang/troposphere,nicolaka/troposphere | ---
+++
@@ -3,13 +3,20 @@
#
# See LICENSE file for full license.
-from . import AWSObject
+from . import AWSObject, AWSProperty
from .validators import integer
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
+
+
+class RedrivePolicy(AWSProperty):
+ props = {
+ 'deadLetterTargetArn': (basestring, False),
+ 'maxReceiveCount': (integer, False),
+ }
class Queue(AWSObject):
@@ -21,6 +28,7 @@
'MessageRetentionPeriod': (integer, False),
'QueueName': (basestring, False),
'ReceiveMessageWaitTimeSeconds': (integer, False),
+ 'RedrivePolicy': (RedrivePolicy, False),
'VisibilityTimeout': (integer, False),
}
|
1026581107668e15db91912302ae3fd577140008 | builder.py | builder.py | import ratebeer
import string
def strip_brewery_name(brewery_name, beer_name):
brewery_word_list = brewery_name.split()
for word in brewery_word_list:
beer_name = beer_name.replace(word, "")
return beer_name.strip()
categories = []
categories.append("0-9")
for letter in string.ascii_uppercase:
categories.append(letter)
rb = ratebeer.RateBeer()
with open("eng.user_words",'w') as f:
for category in categories:
brewery_list = rb.brewers_by_alpha(category)
for brewery in brewery_list:
beer_list = brewery.get_beers()
for beer in beer_list:
#index the beer name without the bewery too
beer_name_without_brewery = strip_brewery_name(brewery.name, beer.name)
f.writelines(beer_name_without_brewery.encode('utf8') + "\n")
| import ratebeer
import string
import csv
from io import BytesIO
def strip_brewery_name(brewery_name, beer_name):
brewery_word_list = brewery_name.split()
for word in brewery_word_list:
beer_name = beer_name.replace(word, "")
return beer_name.strip()
def brewery_name_field(brewery):
val = getattr(brewery, 'name', 'RateBeer does not have this field filled out for this brewery')
return val.encode('utf8')
def beer_description_field(beer):
val = getattr(beer, 'description', 'no description is available')
return val.encode('utf8')
categories = []
categories.append("0-9")
for letter in string.ascii_uppercase:
categories.append(letter)
rb = ratebeer.RateBeer()
mapping = open('mapping.csv','w')
kv = csv.writer(mapping, quoting=csv.QUOTE_NONNUMERIC)
kv.writerow( ('beer name','url','description','full name') )
with open("eng.user_words",'w') as f:
for category in categories:
brewery_list = rb.brewers_by_alpha(categories)
for brewery in brewery_list:
beer_list = brewery.get_beers()
for beer in beer_list:
#index the beer name without the bewery too
beer_name_without_brewery = strip_brewery_name(brewery.name, beer.name)
f.writelines(beer_name_without_brewery.encode('utf8') + "\n")
kv.writerow( (beer_name_without_brewery.encode('utf8'),beer.url,beer_description_field(beer),brewery_name_field(brewery)) )
mapping.close()
| Add a csv export for caching the list of beers for display in the app | Add a csv export for caching the list of beers for display in the app
| Python | mit | jwrubel/initial_dictionary | ---
+++
@@ -1,12 +1,21 @@
import ratebeer
import string
-
+import csv
+from io import BytesIO
def strip_brewery_name(brewery_name, beer_name):
brewery_word_list = brewery_name.split()
for word in brewery_word_list:
beer_name = beer_name.replace(word, "")
return beer_name.strip()
+
+def brewery_name_field(brewery):
+ val = getattr(brewery, 'name', 'RateBeer does not have this field filled out for this brewery')
+ return val.encode('utf8')
+
+def beer_description_field(beer):
+ val = getattr(beer, 'description', 'no description is available')
+ return val.encode('utf8')
categories = []
categories.append("0-9")
@@ -15,12 +24,18 @@
rb = ratebeer.RateBeer()
+mapping = open('mapping.csv','w')
+kv = csv.writer(mapping, quoting=csv.QUOTE_NONNUMERIC)
+kv.writerow( ('beer name','url','description','full name') )
+
with open("eng.user_words",'w') as f:
for category in categories:
- brewery_list = rb.brewers_by_alpha(category)
+ brewery_list = rb.brewers_by_alpha(categories)
for brewery in brewery_list:
beer_list = brewery.get_beers()
for beer in beer_list:
#index the beer name without the bewery too
beer_name_without_brewery = strip_brewery_name(brewery.name, beer.name)
f.writelines(beer_name_without_brewery.encode('utf8') + "\n")
+ kv.writerow( (beer_name_without_brewery.encode('utf8'),beer.url,beer_description_field(beer),brewery_name_field(brewery)) )
+mapping.close() |
7e36568d5b8aeaf2c77e4643a793fdc13cb9ba51 | spacy/about.py | spacy/about.py | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spaCy'
__version__ = '1.6.0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__models__ = {
'en': 'en>=1.1.0,<1.2.0',
'de': 'de>=1.0.0,<1.1.0',
}
| # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '1.6.0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Matthew Honnibal'
__email__ = 'matt@explosion.ai'
__license__ = 'MIT'
__models__ = {
'en': 'en>=1.1.0,<1.2.0',
'de': 'de>=1.0.0,<1.1.0',
}
| Fix title to accommodate sputnik | Fix title to accommodate sputnik | Python | mit | Gregory-Howard/spaCy,spacy-io/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,raphael0202/spaCy,aikramer2/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,banglakit/spaCy,raphael0202/spaCy,aikramer2/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy,banglakit/spaCy,banglakit/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,recognai/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,Gregory-Howard/spaCy,explosion/spaCy,raphael0202/spaCy | ---
+++
@@ -3,7 +3,7 @@
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
-__title__ = 'spaCy'
+__title__ = 'spacy'
__version__ = '1.6.0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io' |
054b0bf9cacef4e55fb8167fb5f2611e2ce39b43 | hw3/hw3_2a.py | hw3/hw3_2a.py | import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 100*(x2 - x1**2)**2 + (1-x1)**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])
flag = True
for i in H_xs.eigenvals().keys():
if i.evalf() <= 0:
flag = False
break
if flag:
print 'Stationary point'
else:
print 'Saddle point'
| import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 100*(x2 - x1**2)**2 + (1-x1)**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])
lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys():
if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
print 'Lacal maxima'
else:
print 'Saddle point'
| Fix decision about minima, maxima and saddle point | Fix decision about minima, maxima and saddle point
| Python | bsd-2-clause | escorciav/amcs211,escorciav/amcs211 | ---
+++
@@ -10,14 +10,16 @@
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])
+lambda_xs = H_xs.eigenvals()
-flag = True
-for i in H_xs.eigenvals().keys():
+count = 0
+for i in lambda_xs.keys():
if i.evalf() <= 0:
- flag = False
- break
+ count += 1
-if flag:
- print 'Stationary point'
+if count == 0:
+ print 'Local minima'
+elif count == len(lambda_xs.keys()):
+ print 'Lacal maxima'
else:
print 'Saddle point' |
210581cfef3d54b055ec9f9b1dc6d19b757a4d6e | cli/cli.py | cli/cli.py | import argparse
parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Anaylysis')
parser.add_argument('-v', '--version', action='version', version='0.1.0')
parser.parse_args()
| Add cmd for getting version | Add cmd for getting version
| Python | mit | McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research | ---
+++
@@ -0,0 +1,5 @@
+import argparse
+
+parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Anaylysis')
+parser.add_argument('-v', '--version', action='version', version='0.1.0')
+parser.parse_args() | |
ddbcd88bb086d1978c9196833d126ded18db97f8 | airflow/migrations/versions/211e584da130_add_ti_state_index.py | airflow/migrations/versions/211e584da130_add_ti_state_index.py | """add TI state index
Revision ID: 211e584da130
Revises: 2e82aab8ef20
Create Date: 2016-06-30 10:54:24.323588
"""
# revision identifiers, used by Alembic.
revision = '211e584da130'
down_revision = '2e82aab8ef20'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_index('ti_state', 'task_instance', ['state'], unique=False)
def downgrade():
op.drop_index('ti_state', table_name='task_instance')
| # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""add TI state index
Revision ID: 211e584da130
Revises: 2e82aab8ef20
Create Date: 2016-06-30 10:54:24.323588
"""
# revision identifiers, used by Alembic.
revision = '211e584da130'
down_revision = '2e82aab8ef20'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_index('ti_state', 'task_instance', ['state'], unique=False)
def downgrade():
op.drop_index('ti_state', table_name='task_instance')
| Add license to migration file | Add license to migration file
| Python | apache-2.0 | dhuang/incubator-airflow,Twistbioscience/incubator-airflow,kerzhner/airflow,vineet-rh/incubator-airflow,zodiac/incubator-airflow,cademarkegard/airflow,MetrodataTeam/incubator-airflow,sekikn/incubator-airflow,sergiohgz/incubator-airflow,wooga/airflow,RealImpactAnalytics/airflow,CloverHealth/airflow,apache/incubator-airflow,r39132/airflow,MortalViews/incubator-airflow,RealImpactAnalytics/airflow,sergiohgz/incubator-airflow,KL-WLCR/incubator-airflow,apache/incubator-airflow,skudriashev/incubator-airflow,ronfung/incubator-airflow,AllisonWang/incubator-airflow,cademarkegard/airflow,caseyching/incubator-airflow,alexvanboxel/airflow,DEVELByte/incubator-airflow,dgies/incubator-airflow,owlabs/incubator-airflow,bolkedebruin/airflow,andyxhadji/incubator-airflow,bolkedebruin/airflow,nathanielvarona/airflow,apache/airflow,preete-dixit-ck/incubator-airflow,lxneng/incubator-airflow,mrkm4ntr/incubator-airflow,vineet-rh/incubator-airflow,DinoCow/airflow,OpringaoDoTurno/airflow,plypaul/airflow,DEVELByte/incubator-airflow,malmiron/incubator-airflow,jesusfcr/airflow,dgies/incubator-airflow,sid88in/incubator-airflow,cfei18/incubator-airflow,spektom/incubator-airflow,gritlogic/incubator-airflow,Acehaidrey/incubator-airflow,vijaysbhat/incubator-airflow,easytaxibr/airflow,easytaxibr/airflow,edgarRd/incubator-airflow,cademarkegard/airflow,ronfung/incubator-airflow,adrpar/incubator-airflow,jlowin/airflow,alexvanboxel/airflow,jhsenjaliya/incubator-airflow,akosel/incubator-airflow,ty707/airflow,alexvanboxel/airflow,ty707/airflow,kerzhner/airflow,akosel/incubator-airflow,jesusfcr/airflow,jesusfcr/airflow,ProstoMaxim/incubator-airflow,asnir/airflow,fenglu-g/incubator-airflow,stverhae/incubator-airflow,easytaxibr/airflow,lyft/incubator-airflow,adamhaney/airflow,ledsusop/airflow,criccomini/airflow,mrkm4ntr/incubator-airflow,yk5/incubator-airflow,MortalViews/incubator-airflow,wooga/airflow,CloverHealth/airflow,Fokko/incubator-airflow,yk5/incubator-airflow,plypaul/airflow,Opr
ingaoDoTurno/airflow,cjqian/incubator-airflow,Acehaidrey/incubator-airflow,sid88in/incubator-airflow,spektom/incubator-airflow,zack3241/incubator-airflow,alexvanboxel/airflow,artwr/airflow,preete-dixit-ck/incubator-airflow,janczak10/incubator-airflow,hamedhsn/incubator-airflow,preete-dixit-ck/incubator-airflow,zoyahav/incubator-airflow,mrares/incubator-airflow,wolfier/incubator-airflow,btallman/incubator-airflow,asnir/airflow,jgao54/airflow,wooga/airflow,criccomini/airflow,apache/incubator-airflow,aminghadersohi/airflow,mrkm4ntr/incubator-airflow,caseyching/incubator-airflow,yati-sagade/incubator-airflow,vijaysbhat/incubator-airflow,nathanielvarona/airflow,dmitry-r/incubator-airflow,andrewmchen/incubator-airflow,vineet-rh/incubator-airflow,ty707/airflow,artwr/airflow,jfantom/incubator-airflow,danielvdende/incubator-airflow,sdiazb/airflow,d-lee/airflow,cjqian/incubator-airflow,gtoonstra/airflow,cjqian/incubator-airflow,adamhaney/airflow,yati-sagade/incubator-airflow,rishibarve/incubator-airflow,jiwang576/incubator-airflow,kerzhner/airflow,NielsZeilemaker/incubator-airflow,subodhchhabra/airflow,apache/incubator-airflow,sekikn/incubator-airflow,DinoCow/airflow,ronfung/incubator-airflow,fenglu-g/incubator-airflow,artwr/airflow,Twistbioscience/incubator-airflow,airbnb/airflow,nathanielvarona/airflow,jiwang576/incubator-airflow,Fokko/incubator-airflow,bolkedebruin/airflow,MetrodataTeam/incubator-airflow,andrewmchen/incubator-airflow,subodhchhabra/airflow,zack3241/incubator-airflow,mistercrunch/airflow,wooga/airflow,ty707/airflow,sdiazb/airflow,vijaysbhat/incubator-airflow,cfei18/incubator-airflow,jbhsieh/incubator-airflow,DEVELByte/incubator-airflow,yiqingj/airflow,CloverHealth/airflow,Twistbioscience/incubator-airflow,mistercrunch/airflow,mtdewulf/incubator-airflow,N3da/incubator-airflow,mattuuh7/incubator-airflow,rishibarve/incubator-airflow,Fokko/incubator-airflow,apache/airflow,NielsZeilemaker/incubator-airflow,dmitry-r/incubator-airflow,zoyahav/incubator-airflow,Alli
sonWang/incubator-airflow,mattuuh7/incubator-airflow,ledsusop/airflow,ronfung/incubator-airflow,mylons/incubator-airflow,wileeam/airflow,vineet-rh/incubator-airflow,fenglu-g/incubator-airflow,forevernull/incubator-airflow,lyft/incubator-airflow,lxneng/incubator-airflow,caseyching/incubator-airflow,wileeam/airflow,hamedhsn/incubator-airflow,mrares/incubator-airflow,KL-WLCR/incubator-airflow,jlowin/airflow,nathanielvarona/airflow,owlabs/incubator-airflow,btallman/incubator-airflow,spektom/incubator-airflow,malmiron/incubator-airflow,adrpar/incubator-airflow,brandsoulmates/incubator-airflow,juvoinc/airflow,juvoinc/airflow,jfantom/incubator-airflow,hgrif/incubator-airflow,dmitry-r/incubator-airflow,zack3241/incubator-airflow,asnir/airflow,bolkedebruin/airflow,malmiron/incubator-airflow,mtdewulf/incubator-airflow,OpringaoDoTurno/airflow,subodhchhabra/airflow,wolfier/incubator-airflow,apache/airflow,andrewmchen/incubator-airflow,jbhsieh/incubator-airflow,adrpar/incubator-airflow,hamedhsn/incubator-airflow,dgies/incubator-airflow,airbnb/airflow,stverhae/incubator-airflow,sergiohgz/incubator-airflow,jbhsieh/incubator-airflow,zack3241/incubator-airflow,Twistbioscience/incubator-airflow,jesusfcr/airflow,mtagle/airflow,wndhydrnt/airflow,ledsusop/airflow,andyxhadji/incubator-airflow,d-lee/airflow,cfei18/incubator-airflow,mrkm4ntr/incubator-airflow,jhsenjaliya/incubator-airflow,caseyching/incubator-airflow,danielvdende/incubator-airflow,forevernull/incubator-airflow,lyft/incubator-airflow,skudriashev/incubator-airflow,MetrodataTeam/incubator-airflow,jhsenjaliya/incubator-airflow,sekikn/incubator-airflow,r39132/airflow,cfei18/incubator-airflow,Acehaidrey/incubator-airflow,zodiac/incubator-airflow,dhuang/incubator-airflow,easytaxibr/airflow,DEVELByte/incubator-airflow,sid88in/incubator-airflow,apache/airflow,Tagar/incubator-airflow,janczak10/incubator-airflow,forevernull/incubator-airflow,jlowin/airflow,r39132/airflow,brandsoulmates/incubator-airflow,AllisonWang/incubator-airflow,
NielsZeilemaker/incubator-airflow,hgrif/incubator-airflow,cjqian/incubator-airflow,aminghadersohi/airflow,mtagle/airflow,adamhaney/airflow,yati-sagade/incubator-airflow,gtoonstra/airflow,andyxhadji/incubator-airflow,yati-sagade/incubator-airflow,preete-dixit-ck/incubator-airflow,cfei18/incubator-airflow,bolkedebruin/airflow,saguziel/incubator-airflow,janczak10/incubator-airflow,criccomini/airflow,KL-WLCR/incubator-airflow,d-lee/airflow,mrares/incubator-airflow,DinoCow/airflow,criccomini/airflow,MortalViews/incubator-airflow,mtagle/airflow,andyxhadji/incubator-airflow,vijaysbhat/incubator-airflow,mattuuh7/incubator-airflow,btallman/incubator-airflow,stverhae/incubator-airflow,janczak10/incubator-airflow,jgao54/airflow,hamedhsn/incubator-airflow,saguziel/incubator-airflow,gilt/incubator-airflow,Fokko/incubator-airflow,yk5/incubator-airflow,DinoCow/airflow,danielvdende/incubator-airflow,MortalViews/incubator-airflow,ProstoMaxim/incubator-airflow,dhuang/incubator-airflow,N3da/incubator-airflow,zodiac/incubator-airflow,mylons/incubator-airflow,edgarRd/incubator-airflow,mtdewulf/incubator-airflow,jbhsieh/incubator-airflow,forevernull/incubator-airflow,edgarRd/incubator-airflow,gilt/incubator-airflow,mylons/incubator-airflow,owlabs/incubator-airflow,sekikn/incubator-airflow,ledsusop/airflow,apache/airflow,juvoinc/airflow,jfantom/incubator-airflow,akosel/incubator-airflow,gritlogic/incubator-airflow,hgrif/incubator-airflow,saguziel/incubator-airflow,lxneng/incubator-airflow,skudriashev/incubator-airflow,ProstoMaxim/incubator-airflow,saguziel/incubator-airflow,gtoonstra/airflow,dmitry-r/incubator-airflow,nathanielvarona/airflow,yiqingj/airflow,jlowin/airflow,yiqingj/airflow,skudriashev/incubator-airflow,wileeam/airflow,airbnb/airflow,gilt/incubator-airflow,mtdewulf/incubator-airflow,zoyahav/incubator-airflow,aminghadersohi/airflow,lxneng/incubator-airflow,jiwang576/incubator-airflow,Tagar/incubator-airflow,adrpar/incubator-airflow,mrares/incubator-airflow,brandsoulmates/incu
bator-airflow,hgrif/incubator-airflow,wndhydrnt/airflow,MetrodataTeam/incubator-airflow,nathanielvarona/airflow,mtagle/airflow,holygits/incubator-airflow,rishibarve/incubator-airflow,Acehaidrey/incubator-airflow,Acehaidrey/incubator-airflow,stverhae/incubator-airflow,Acehaidrey/incubator-airflow,OpringaoDoTurno/airflow,kerzhner/airflow,jgao54/airflow,juvoinc/airflow,ProstoMaxim/incubator-airflow,brandsoulmates/incubator-airflow,yk5/incubator-airflow,danielvdende/incubator-airflow,subodhchhabra/airflow,gritlogic/incubator-airflow,Tagar/incubator-airflow,adamhaney/airflow,KL-WLCR/incubator-airflow,wolfier/incubator-airflow,danielvdende/incubator-airflow,jhsenjaliya/incubator-airflow,apache/airflow,sdiazb/airflow,plypaul/airflow,dhuang/incubator-airflow,airbnb/airflow,N3da/incubator-airflow,holygits/incubator-airflow,gtoonstra/airflow,CloverHealth/airflow,mistercrunch/airflow,plypaul/airflow,rishibarve/incubator-airflow,mattuuh7/incubator-airflow,Tagar/incubator-airflow,d-lee/airflow,dgies/incubator-airflow,jiwang576/incubator-airflow,akosel/incubator-airflow,sdiazb/airflow,edgarRd/incubator-airflow,AllisonWang/incubator-airflow,yiqingj/airflow,malmiron/incubator-airflow,btallman/incubator-airflow,wndhydrnt/airflow,jfantom/incubator-airflow,holygits/incubator-airflow,r39132/airflow,mistercrunch/airflow,wndhydrnt/airflow,gilt/incubator-airflow,mylons/incubator-airflow,spektom/incubator-airflow,fenglu-g/incubator-airflow,jgao54/airflow,RealImpactAnalytics/airflow,holygits/incubator-airflow,RealImpactAnalytics/airflow,lyft/incubator-airflow,cfei18/incubator-airflow,asnir/airflow,andrewmchen/incubator-airflow,zoyahav/incubator-airflow,zodiac/incubator-airflow,owlabs/incubator-airflow,wolfier/incubator-airflow,sergiohgz/incubator-airflow,danielvdende/incubator-airflow,artwr/airflow,NielsZeilemaker/incubator-airflow,sid88in/incubator-airflow,gritlogic/incubator-airflow,cademarkegard/airflow,aminghadersohi/airflow,wileeam/airflow,N3da/incubator-airflow | ---
+++
@@ -1,3 +1,17 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
"""add TI state index
Revision ID: 211e584da130 |
0469f8f707ba542b0e1af2915d8a46a0107d9d62 | calexicon/tests/test_dates.py | calexicon/tests/test_dates.py | import unittest
from datetime import date
from calexicon.dates import DateWithCalendar
class TestDateWithCalendar(unittest.TestCase):
def setUp(self):
date_dt = date(2010, 8, 1)
self.date_wc = DateWithCalendar(None, date_dt)
def test_comparisons(self):
self.assertTrue(self.date_wc < date(2010, 8, 2))
self.assertFalse(self.date_wc < date(2010, 7, 31))
self.assertTrue(self.date_wc > date(2010, 7, 2))
self.assertFalse(self.date_wc > date(2010, 8, 31))
def test_nonstrict_comparisons(self):
self.assertTrue(self.date_wc <= date(2010, 8, 2))
self.assertFalse(self.date_wc <= date(2010, 7, 31))
self.assertTrue(self.date_wc >= date(2010, 7, 2))
self.assertFalse(self.date_wc >= date(2010, 8, 31))
self.assertTrue(self.date_wc <= date(2010, 8, 1))
self.assertTrue(self.date_wc >= date(2010, 8, 1))
| import unittest
from datetime import date, timedelta
from calexicon.dates import DateWithCalendar
class TestDateWithCalendar(unittest.TestCase):
def setUp(self):
date_dt = date(2010, 8, 1)
self.date_wc = DateWithCalendar(None, date_dt)
def test_comparisons(self):
self.assertTrue(self.date_wc < date(2010, 8, 2))
self.assertFalse(self.date_wc < date(2010, 7, 31))
self.assertTrue(self.date_wc > date(2010, 7, 2))
self.assertFalse(self.date_wc > date(2010, 8, 31))
def test_nonstrict_comparisons(self):
self.assertTrue(self.date_wc <= date(2010, 8, 2))
self.assertFalse(self.date_wc <= date(2010, 7, 31))
self.assertTrue(self.date_wc >= date(2010, 7, 2))
self.assertFalse(self.date_wc >= date(2010, 8, 31))
self.assertTrue(self.date_wc <= date(2010, 8, 1))
self.assertTrue(self.date_wc >= date(2010, 8, 1))
def test_subtraction(self):
self.assertEqual(self.date_wc - date(2012, 10, 30), timedelta(days=-821))
| Add a test for __sub__ between a DateWith and a vanilla date. | Add a test for __sub__ between a DateWith and a vanilla date.
| Python | apache-2.0 | jwg4/calexicon,jwg4/qual | ---
+++
@@ -1,6 +1,6 @@
import unittest
-from datetime import date
+from datetime import date, timedelta
from calexicon.dates import DateWithCalendar
@@ -23,3 +23,5 @@
self.assertTrue(self.date_wc <= date(2010, 8, 1))
self.assertTrue(self.date_wc >= date(2010, 8, 1))
+ def test_subtraction(self):
+ self.assertEqual(self.date_wc - date(2012, 10, 30), timedelta(days=-821)) |
f3e6bc366ea77468772905c0094c9b4305c49fed | jsonpickle/handlers.py | jsonpickle/handlers.py |
class TypeRegistered(type):
"""
As classes of this metaclass are created, they keep a registry in the
base class of all handler referenced by the keys in cls._handles.
"""
def __init__(cls, name, bases, namespace):
super(TypeRegistered, cls).__init__(name, bases, namespace)
if not hasattr(cls, '_registry'):
cls._registry = {}
types_handled = getattr(cls, '_handles', [])
cls._registry.update((type_, cls) for type_ in types_handled)
class BaseHandler(object):
"""
Abstract base class for handlers.
"""
__metaclass__ = TypeRegistered
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
|
class TypeRegistered(type):
"""
As classes of this metaclass are created, they keep a registry in the
base class of all handler referenced by the keys in cls._handles.
"""
def __init__(cls, name, bases, namespace):
super(TypeRegistered, cls).__init__(name, bases, namespace)
if not hasattr(cls, '_registry'):
cls._registry = {}
types_handled = getattr(cls, '_handles', [])
cls._registry.update((type_, cls) for type_ in types_handled)
class BaseHandler(object):
"""
Abstract base class for handlers.
"""
__metaclass__ = TypeRegistered
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
# for backward compatibility, provide 'registry'
# jsonpickle 0.4 clients will call it with something like:
# registry.register(handled_type, handler_class)
class registry:
@staticmethod
def register(handled_type, handler_class):
pass
| Add a backward-compatibility shim to lessen the burden upgrading from 0.4 to 0.5 | Add a backward-compatibility shim to lessen the burden upgrading from 0.4 to 0.5
| Python | bsd-3-clause | mandx/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle | ---
+++
@@ -47,3 +47,11 @@
"""
raise NotImplementedError("Abstract method.")
+
+# for backward compatibility, provide 'registry'
+# jsonpickle 0.4 clients will call it with something like:
+# registry.register(handled_type, handler_class)
+class registry:
+ @staticmethod
+ def register(handled_type, handler_class):
+ pass |
08eb1f9e510b85e77d401ca4e13b7ad5354f4ecf | ingestors/email/outlookpst.py | ingestors/email/outlookpst.py | import logging
from followthemoney import model
from ingestors.ingestor import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
log = logging.getLogger(__name__)
class OutlookPSTIngestor(Ingestor, TempFileSupport, OLESupport, ShellSupport):
MIME_TYPES = ['application/vnd.ms-outlook']
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path, entity):
entity.schema = model.get('Package')
self.extract_ole_metadata(file_path, entity)
temp_dir = self.make_empty_directory()
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-D', # include deleted
'-r', # recursive structure
'-8', # utf-8 where possible
'-b',
'-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
except Exception:
log.exception("Failed to unpack PST.")
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
raise
| import logging
from followthemoney import model
from ingestors.ingestor import Ingestor
from ingestors.support.temp import TempFileSupport
from ingestors.support.shell import ShellSupport
from ingestors.support.ole import OLESupport
from ingestors.directory import DirectoryIngestor
log = logging.getLogger(__name__)
class OutlookPSTIngestor(Ingestor, TempFileSupport, OLESupport, ShellSupport):
MIME_TYPES = ['application/vnd.ms-outlook']
EXTENSIONS = ['pst', 'ost', 'pab']
BASE_SCORE = 5
COMMAND_TIMEOUT = 12 * 60 * 60
def ingest(self, file_path, entity):
entity.schema = model.get('Package')
self.extract_ole_metadata(file_path, entity)
temp_dir = self.make_empty_directory()
try:
self.exec_command('readpst',
'-e', # make subfolders, files per message
'-S', # single files
'-D', # include deleted
# '-r', # recursive structure
'-8', # utf-8 where possible
'-cv', # export vcards
# '-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
except Exception:
log.exception("Failed to unpack PST.")
# Handle partially extracted archives.
self.manager.delegate(DirectoryIngestor, temp_dir, entity)
raise
| Make outlook emit single files | Make outlook emit single files
| Python | mit | alephdata/ingestors | ---
+++
@@ -22,12 +22,13 @@
temp_dir = self.make_empty_directory()
try:
self.exec_command('readpst',
- '-e', # make subfolders, files per message
- '-D', # include deleted
- '-r', # recursive structure
- '-8', # utf-8 where possible
- '-b',
- '-q', # quiet
+ '-e', # make subfolders, files per message
+ '-S', # single files
+ '-D', # include deleted
+ # '-r', # recursive structure
+ '-8', # utf-8 where possible
+ '-cv', # export vcards
+ # '-q', # quiet
'-o', temp_dir,
file_path)
self.manager.delegate(DirectoryIngestor, temp_dir, entity) |
a2c8ade4d73b6756fef2829c0e656acbe60f2b03 | fabfile.py | fabfile.py | from fabric.api import local
from fabric.api import warn_only
CMD_MANAGE = "python manage.py "
def auto_schema():
with warn_only():
schema('rockit.foundation.core')
schema('rockit.plugins.mailout')
schema('rockit.plugins.razberry')
def build():
migrate('rockit.foundation.core')
migrate('rockit.plugins.mailout')
migrate('rockit.plugins.razberry')
load_data('rockit/foundation/core/fixtures/settings.json')
load_data('rockit/plugins/mailout/fixtures/servers.json')
test()
def load_data(path):
local(CMD_MANAGE + 'loaddata %s' % path)
def migrate(app):
local(CMD_MANAGE + 'migrate %s' % app)
def runserver(localonly=True):
if localonly:
local(CMD_MANAGE + 'runserver')
else:
local(CMD_MANAGE + 'runserver 0.0.0.0')
def schema(app):
local(CMD_MANAGE + 'schemamigration %s --auto' % app)
def setup(environment):
local('pip install -r requirements/%s' % environment)
def test():
local(CMD_MANAGE + 'test') | from fabric.api import local
CMD_MANAGE = "python manage.py "
def auto_schema():
schema('rockit.foundation.core')
schema('rockit.plugins.mailout')
schema('rockit.plugins.razberry')
def build():
migrate('rockit.foundation.core')
migrate('rockit.plugins.mailout')
migrate('rockit.plugins.razberry')
load_data('rockit/foundation/core/fixtures/settings.json')
load_data('rockit/plugins/mailout/fixtures/servers.json')
test()
def load_data(path):
local(CMD_MANAGE + 'loaddata %s' % path)
def migrate(app):
local(CMD_MANAGE + 'migrate %s' % app)
def runserver(localonly=True):
if localonly:
local(CMD_MANAGE + 'runserver')
else:
local(CMD_MANAGE + 'runserver 0.0.0.0')
def schema(app):
local(CMD_MANAGE + 'schemamigration %s --auto' % app)
def setup(environment):
local('pip install -r requirements/%s' % environment)
def test():
local(CMD_MANAGE + 'test') | Remove warn only from fabric file | Remove warn only from fabric file
| Python | mit | acreations/rockit-server,acreations/rockit-server,acreations/rockit-server,acreations/rockit-server | ---
+++
@@ -1,13 +1,11 @@
from fabric.api import local
-from fabric.api import warn_only
CMD_MANAGE = "python manage.py "
def auto_schema():
- with warn_only():
- schema('rockit.foundation.core')
- schema('rockit.plugins.mailout')
- schema('rockit.plugins.razberry')
+ schema('rockit.foundation.core')
+ schema('rockit.plugins.mailout')
+ schema('rockit.plugins.razberry')
def build():
migrate('rockit.foundation.core') |
79911105899c95bf3fdb27c1aa61e8ff08ebef14 | bokeh/models/component.py | bokeh/models/component.py | from __future__ import absolute_import
from ..model import Model
from ..core.properties import abstract
from ..core.properties import Bool
from ..embed import notebook_div
@abstract
class Component(Model):
""" A base class for all embeddable models, i.e. plots, layouts and widgets.
"""
disabled = Bool(False, help="""
Whether the widget will be disabled when rendered. If ``True``,
the widget will be greyed-out, and not respond to UI events.
""")
# TODO: (mp) Not yet, because it breaks plotting/notebook examples.
# Rename to _repr_html_ if we decide to enable this by default.
def __repr_html__(self):
return notebook_div(self)
@property
def html(self):
from IPython.core.display import HTML
return HTML(self.__repr_html__())
| from __future__ import absolute_import
from ..model import Model
from ..core.properties import abstract
from ..core.properties import Bool
from ..embed import notebook_div
@abstract
class Component(Model):
""" A base class for all for all DOM-level components, i.e. plots, layouts
and widgets.
"""
disabled = Bool(False, help="""
Whether the widget will be disabled when rendered. If ``True``,
the widget will be greyed-out, and not respond to UI events.
""")
# TODO: (mp) Not yet, because it breaks plotting/notebook examples.
# Rename to _repr_html_ if we decide to enable this by default.
def __repr_html__(self):
return notebook_div(self)
@property
def html(self):
from IPython.core.display import HTML
return HTML(self.__repr_html__())
| Change Component docstring to mention DOM-level models | Change Component docstring to mention DOM-level models
| Python | bsd-3-clause | aavanian/bokeh,bokeh/bokeh,ericmjl/bokeh,schoolie/bokeh,ericmjl/bokeh,dennisobrien/bokeh,azjps/bokeh,mindriot101/bokeh,jakirkham/bokeh,aiguofer/bokeh,dennisobrien/bokeh,ptitjano/bokeh,msarahan/bokeh,azjps/bokeh,aiguofer/bokeh,ericmjl/bokeh,mindriot101/bokeh,aavanian/bokeh,phobson/bokeh,bokeh/bokeh,philippjfr/bokeh,KasperPRasmussen/bokeh,KasperPRasmussen/bokeh,msarahan/bokeh,msarahan/bokeh,bokeh/bokeh,DuCorey/bokeh,clairetang6/bokeh,jakirkham/bokeh,philippjfr/bokeh,clairetang6/bokeh,ericmjl/bokeh,mindriot101/bokeh,justacec/bokeh,stonebig/bokeh,azjps/bokeh,draperjames/bokeh,bokeh/bokeh,timsnyder/bokeh,draperjames/bokeh,timsnyder/bokeh,percyfal/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,KasperPRasmussen/bokeh,aavanian/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,rs2/bokeh,aavanian/bokeh,clairetang6/bokeh,ptitjano/bokeh,DuCorey/bokeh,dennisobrien/bokeh,percyfal/bokeh,DuCorey/bokeh,percyfal/bokeh,jakirkham/bokeh,azjps/bokeh,aiguofer/bokeh,msarahan/bokeh,rs2/bokeh,quasiben/bokeh,DuCorey/bokeh,stonebig/bokeh,Karel-van-de-Plassche/bokeh,azjps/bokeh,dennisobrien/bokeh,phobson/bokeh,schoolie/bokeh,schoolie/bokeh,philippjfr/bokeh,ptitjano/bokeh,DuCorey/bokeh,quasiben/bokeh,clairetang6/bokeh,KasperPRasmussen/bokeh,percyfal/bokeh,jakirkham/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,justacec/bokeh,mindriot101/bokeh,philippjfr/bokeh,aiguofer/bokeh,percyfal/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,stonebig/bokeh,timsnyder/bokeh,rs2/bokeh,justacec/bokeh,ptitjano/bokeh,schoolie/bokeh,schoolie/bokeh,philippjfr/bokeh,phobson/bokeh,jakirkham/bokeh,bokeh/bokeh,timsnyder/bokeh,quasiben/bokeh,timsnyder/bokeh,ericmjl/bokeh,aavanian/bokeh,phobson/bokeh,draperjames/bokeh,aiguofer/bokeh,KasperPRasmussen/bokeh,dennisobrien/bokeh,ptitjano/bokeh | ---
+++
@@ -7,7 +7,8 @@
@abstract
class Component(Model):
- """ A base class for all embeddable models, i.e. plots, layouts and widgets.
+ """ A base class for all for all DOM-level components, i.e. plots, layouts
+ and widgets.
"""
|
5847e9db8f316fdee6493fefc9cbc64a1e6a28de | km_api/know_me/serializers/subscription_serializers.py | km_api/know_me/serializers/subscription_serializers.py | import hashlib
import logging
from django.utils.translation import ugettext
from rest_framework import serializers
from know_me import models, subscriptions
logger = logging.getLogger(__name__)
class AppleSubscriptionSerializer(serializers.ModelSerializer):
"""
Serializer for an Apple subscription.
"""
class Meta:
fields = (
"id",
"time_created",
"time_updated",
"expiration_time",
"receipt_data",
)
model = models.SubscriptionAppleData
def validate(self, data):
"""
Ensure the provided receipt data corresponds to a valid Apple
receipt.
Returns:
The validated data.
"""
validated_data = data.copy()
receipt_data = validated_data["receipt_data"]
data_hash = hashlib.sha256(receipt_data.encode()).hexdigest()
if models.SubscriptionAppleData.objects.filter(
receipt_data_hash=data_hash
).exists():
logger.warning(
"Duplicate Apple receipt submitted with hash: %s", data_hash
)
raise serializers.ValidationError(
{
"receipt_data": ugettext(
"This receipt has already been used."
)
}
)
try:
receipt = subscriptions.validate_apple_receipt(receipt_data)
except subscriptions.ReceiptException as e:
raise serializers.ValidationError(
code=e.code, detail={"receipt_data": e.msg}
)
validated_data["expiration_time"] = receipt.expires_date
return validated_data
| import hashlib
import logging
from django.utils.translation import ugettext
from rest_framework import serializers
from know_me import models, subscriptions
logger = logging.getLogger(__name__)
class AppleSubscriptionSerializer(serializers.ModelSerializer):
"""
Serializer for an Apple subscription.
"""
class Meta:
fields = (
"id",
"time_created",
"time_updated",
"expiration_time",
"receipt_data",
)
model = models.SubscriptionAppleData
read_only_fields = ("expiration_time",)
def validate(self, data):
"""
Ensure the provided receipt data corresponds to a valid Apple
receipt.
Returns:
The validated data.
"""
validated_data = data.copy()
receipt_data = validated_data["receipt_data"]
data_hash = hashlib.sha256(receipt_data.encode()).hexdigest()
if models.SubscriptionAppleData.objects.filter(
receipt_data_hash=data_hash
).exists():
logger.warning(
"Duplicate Apple receipt submitted with hash: %s", data_hash
)
raise serializers.ValidationError(
{
"receipt_data": ugettext(
"This receipt has already been used."
)
}
)
try:
receipt = subscriptions.validate_apple_receipt(receipt_data)
except subscriptions.ReceiptException as e:
raise serializers.ValidationError(
code=e.code, detail={"receipt_data": e.msg}
)
validated_data["expiration_time"] = receipt.expires_date
return validated_data
| Mark apple receipt expiration time as read only. | Mark apple receipt expiration time as read only.
| Python | apache-2.0 | knowmetools/km-api,knowmetools/km-api,knowmetools/km-api,knowmetools/km-api | ---
+++
@@ -24,6 +24,7 @@
"receipt_data",
)
model = models.SubscriptionAppleData
+ read_only_fields = ("expiration_time",)
def validate(self, data):
""" |
fa279ca1f8e4c8e6b4094840d3ab40c0ac637eff | ocradmin/ocrpresets/models.py | ocradmin/ocrpresets/models.py | from django.db import models
from django.contrib.auth.models import User
from picklefield import fields
from tagging.fields import TagField
import tagging
class OcrPreset(models.Model):
user = models.ForeignKey(User)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
public = models.BooleanField(default=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True, auto_now=True)
type = models.CharField(max_length=20,
choices=[("segment", "Segment"), ("binarize", "Binarize")])
data = fields.PickledObjectField()
def __unicode__(self):
"""
String representation.
"""
return self.name
| from django.db import models
from django.contrib.auth.models import User
from picklefield import fields
from tagging.fields import TagField
import tagging
class OcrPreset(models.Model):
user = models.ForeignKey(User)
tags = TagField()
name = models.CharField(max_length=100, unique=True)
description = models.TextField(null=True, blank=True)
public = models.BooleanField(default=True)
created_on = models.DateField(auto_now_add=True)
updated_on = models.DateField(null=True, blank=True, auto_now=True)
type = models.CharField(max_length=20,
choices=[("segment", "Segment"), ("binarize", "Binarize")])
data = fields.PickledObjectField()
def __unicode__(self):
"""
String representation.
"""
return "<%s: %s>" % (self.__class__.__name__, self.name)
| Improve unicode method. Whitespace cleanup | Improve unicode method. Whitespace cleanup
| Python | apache-2.0 | vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium | ---
+++
@@ -21,4 +21,4 @@
"""
String representation.
"""
- return self.name
+ return "<%s: %s>" % (self.__class__.__name__, self.name) |
ebdc3a1d00ddd8c15aaa64c436e43f3815317923 | pythainlp/segment/pyicu.py | pythainlp/segment/pyicu.py | from __future__ import absolute_import,print_function
from itertools import groupby
import PyICU
import six
# ตัดคำภาษาไทย
def segment(txt):
"""รับค่า ''str'' คืนค่าออกมาเป็น ''list'' ที่ได้มาจากการตัดคำโดย ICU"""
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(six.u(txt))
breaks = list(bd)
return [txt[x[0]:x[1]] for x in zip([0]+breaks, breaks)]
if __name__ == "__main__":
print(segment('ทดสอบระบบตัดคำด้วยไอซียู'))
print(segment('ผมชอบพูดไทยคำ English คำ'))
print(segment('ผมชอบพูดไทยคำEnglishคำ')) | from __future__ import absolute_import,print_function
from itertools import groupby
import PyICU
# ตัดคำภาษาไทย
def segment(txt):
"""รับค่า ''str'' คืนค่าออกมาเป็น ''list'' ที่ได้มาจากการตัดคำโดย ICU"""
bd = PyICU.BreakIterator.createWordInstance(PyICU.Locale("th"))
bd.setText(six.u(txt))
breaks = list(bd)
return [txt[x[0]:x[1]] for x in zip([0]+breaks, breaks)]
if __name__ == "__main__":
print(segment('ทดสอบระบบตัดคำด้วยไอซียู'))
print(segment('ผมชอบพูดไทยคำ English คำ'))
print(segment('ผมชอบพูดไทยคำEnglishคำ')) | Revert "fix bug import six" | Revert "fix bug import six"
This reverts commit a80c1d7c80d68f72d435dbb7ac5c48a6114716fb.
| Python | apache-2.0 | PyThaiNLP/pythainlp | ---
+++
@@ -1,7 +1,6 @@
from __future__ import absolute_import,print_function
from itertools import groupby
import PyICU
-import six
# ตัดคำภาษาไทย
def segment(txt):
"""รับค่า ''str'' คืนค่าออกมาเป็น ''list'' ที่ได้มาจากการตัดคำโดย ICU""" |
f29665f853d1a33bcf08d1a9298460d0be11d610 | molly/apps/places/__init__.py | molly/apps/places/__init__.py | from flask import Blueprint
from flask.ext.babel import lazy_gettext as _
from molly.apps.common.app import BaseApp
from molly.apps.places.endpoints import PointOfInterestEndpoint
from molly.apps.places.services import PointsOfInterest
class App(BaseApp):
module = 'http://mollyproject.org/apps/places'
human_name = _('Places')
def __init__(self, instance_name, config, providers, services):
self.instance_name = instance_name
poi_service = PointsOfInterest(instance_name, services['kv'].db[instance_name])
for provider in providers:
provider.poi_service = poi_service
self._register_provider_as_importer(provider, services)
self._poi_endpoint = PointOfInterestEndpoint(instance_name, poi_service)
self.blueprint = Blueprint(self.instance_name, __name__)
self.blueprint.add_url_rule('/<slug>', 'poi', self._poi_endpoint.get)
self.links = []
| from flask import Blueprint
from flask.ext.babel import lazy_gettext as _
from molly.apps.common.app import BaseApp
from molly.apps.places.endpoints import PointOfInterestEndpoint
from molly.apps.places.services import PointsOfInterest
class App(BaseApp):
module = 'http://mollyproject.org/apps/places'
human_name = _('Places')
def __init__(self, instance_name, config, providers, services):
self.instance_name = instance_name
poi_service = PointsOfInterest(instance_name, services['kv'].db[instance_name])
for provider in providers:
provider.poi_service = poi_service
self._register_provider_as_importer(provider, services)
self._poi_endpoint = PointOfInterestEndpoint(instance_name, poi_service)
self.blueprint = Blueprint(self.instance_name, __name__)
self.blueprint.add_url_rule('/<slug>/', 'poi', self._poi_endpoint.get)
self.links = []
| Update URL rules to match Molly 1.x | Update URL rules to match Molly 1.x
| Python | apache-2.0 | ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next | ---
+++
@@ -22,5 +22,5 @@
self._poi_endpoint = PointOfInterestEndpoint(instance_name, poi_service)
self.blueprint = Blueprint(self.instance_name, __name__)
- self.blueprint.add_url_rule('/<slug>', 'poi', self._poi_endpoint.get)
+ self.blueprint.add_url_rule('/<slug>/', 'poi', self._poi_endpoint.get)
self.links = [] |
c7455da1b0092e926ed9dafe5ac5ae1335401dba | admin.py | admin.py | from django.contrib import admin
from django.contrib.sites.models import Site
from .models import Church
admin.site.site_header = "Churches of Bridlington Administration"
admin.site.register(Church)
admin.site.unregister(Site)
| from django.contrib import admin
from django.contrib.sites.models import Site
from .models import Church
admin.site.site_header = "Churches of Bridlington Administration"
admin.site.register(Church)
| Undo deregistration of Site object | Undo deregistration of Site object
This will now be controlled by restricting permissions in the
admin.
| Python | mit | bm424/churchmanager,bm424/churchmanager | ---
+++
@@ -6,4 +6,3 @@
admin.site.site_header = "Churches of Bridlington Administration"
admin.site.register(Church)
-admin.site.unregister(Site) |
dee0b3764259ee7f4916e8e5e303c48afb3e5edd | api/base/urls.py | api/base/urls.py | from django.conf import settings
from django.conf.urls import include, url
# from django.contrib import admin
from django.conf.urls.static import static
from . import views
urlpatterns = [
### API ###
url(r'^$', views.root),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^docs/', include('rest_framework_swagger.urls')),
] + static('/static/', document_root=settings.STATIC_ROOT) | from django.conf import settings
from django.conf.urls import include, url, patterns
# from django.contrib import admin
from django.conf.urls.static import static
from . import views
urlpatterns = [
### API ###
url(r'^v2/', include(patterns('',
url(r'^$', views.root),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^docs/', include('rest_framework_swagger.urls')),
)))] + static('/static/', document_root=settings.STATIC_ROOT) | Change API url prefix to 'v2' | Change API url prefix to 'v2'
| Python | apache-2.0 | laurenrevere/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,cosenal/osf.io,reinaH/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,aaxelb/osf.io,MerlinZhang/osf.io,TomHeatwole/osf.io,felliott/osf.io,sloria/osf.io,dplorimer/osf,wearpants/osf.io,chrisseto/osf.io,zamattiac/osf.io,wearpants/osf.io,brianjgeiger/osf.io,KAsante95/osf.io,SSJohns/osf.io,cwisecarver/osf.io,cslzchen/osf.io,HarryRybacki/osf.io,TomBaxter/osf.io,cldershem/osf.io,alexschiller/osf.io,chrisseto/osf.io,rdhyee/osf.io,SSJohns/osf.io,jolene-esposito/osf.io,saradbowman/osf.io,alexschiller/osf.io,emetsger/osf.io,Ghalko/osf.io,MerlinZhang/osf.io,haoyuchen1992/osf.io,pattisdr/osf.io,jmcarp/osf.io,billyhunt/osf.io,GageGaskins/osf.io,hmoco/osf.io,acshi/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,samchrisinger/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,cosenal/osf.io,njantrania/osf.io,erinspace/osf.io,icereval/osf.io,hmoco/osf.io,Johnetordoff/osf.io,fabianvf/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,bdyetton/prettychart,petermalcolm/osf.io,acshi/osf.io,zachjanicki/osf.io,caseyrygt/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,caseyrygt/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,kch8qx/osf.io,barbour-em/osf.io,KAsante95/osf.io,TomBaxter/osf.io,baylee-d/osf.io,MerlinZhang/osf.io,doublebits/osf.io,felliott/osf.io,mluo613/osf.io,alexschiller/osf.io,bdyetton/prettychart,leb2dg/osf.io,doublebits/osf.io,lyndsysimon/osf.io,adlius/osf.io,zamattiac/osf.io,jolene-esposito/osf.io,brandonPurvis/osf.io,TomHeatwole/osf.io,felliott/osf.io,danielneis/osf.io,wearpants/osf.io,KAsante95/osf.io,icereval/osf.io,TomBaxter/osf.io,samchrisinger/osf.io,adlius/osf.io,jnayak1/osf.io,cldershem/osf.io,mluke93/osf.io,caneruguz/osf.io,leb2dg/osf.io,icereval/osf.io,jeffreyliu3230/osf.io,sbt9uc/osf.io,felliott/osf.io,njantrania/osf.io,HalcyonChimera/osf.io,ckc6cz/osf.io,Johnetordoff/osf.io,barbour-em/osf.io,kch8qx/osf.io,haoyuch
en1992/osf.io,cslzchen/osf.io,rdhyee/osf.io,chennan47/osf.io,ckc6cz/osf.io,jnayak1/osf.io,lyndsysimon/osf.io,zachjanicki/osf.io,mfraezz/osf.io,abought/osf.io,caneruguz/osf.io,sbt9uc/osf.io,samanehsan/osf.io,jinluyuan/osf.io,pattisdr/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,lyndsysimon/osf.io,alexschiller/osf.io,abought/osf.io,samanehsan/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,ckc6cz/osf.io,mfraezz/osf.io,cosenal/osf.io,haoyuchen1992/osf.io,asanfilippo7/osf.io,doublebits/osf.io,mfraezz/osf.io,reinaH/osf.io,ZobairAlijan/osf.io,barbour-em/osf.io,caseyrygt/osf.io,cwisecarver/osf.io,cldershem/osf.io,petermalcolm/osf.io,adlius/osf.io,adlius/osf.io,lyndsysimon/osf.io,mattclark/osf.io,chrisseto/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,crcresearch/osf.io,cslzchen/osf.io,mluo613/osf.io,acshi/osf.io,danielneis/osf.io,chennan47/osf.io,fabianvf/osf.io,wearpants/osf.io,jnayak1/osf.io,petermalcolm/osf.io,Nesiehr/osf.io,billyhunt/osf.io,TomHeatwole/osf.io,jolene-esposito/osf.io,njantrania/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,mluo613/osf.io,doublebits/osf.io,rdhyee/osf.io,rdhyee/osf.io,fabianvf/osf.io,monikagrabowska/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,Ghalko/osf.io,ticklemepierce/osf.io,HarryRybacki/osf.io,erinspace/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,petermalcolm/osf.io,asanfilippo7/osf.io,arpitar/osf.io,jeffreyliu3230/osf.io,RomanZWang/osf.io,kwierman/osf.io,zachjanicki/osf.io,arpitar/osf.io,samanehsan/osf.io,cosenal/osf.io,MerlinZhang/osf.io,leb2dg/osf.io,arpitar/osf.io,kch8qx/osf.io,cslzchen/osf.io,mluke93/osf.io,sloria/osf.io,GageGaskins/osf.io,caseyrollins/osf.io,danielneis/osf.io,pattisdr/osf.io,SSJohns/osf.io,njantrania/osf.io,sbt9uc/osf.io,kwierman/osf.io,asanfilippo7/osf.io,amyshi188/osf.io,emetsger/osf.io,abought/osf.io,crcresearch/osf.io,jmcarp/osf.io,TomHeatwole/osf.io,binoculars/osf.io,emetsger/osf.io,amyshi188/osf.io,cwisecarver/osf.io,ZobairAlijan/osf.io,laurenrevere/osf.io,RomanZWang/osf.io,kwierman/osf.io
,billyhunt/osf.io,billyhunt/osf.io,doublebits/osf.io,jinluyuan/osf.io,brandonPurvis/osf.io,reinaH/osf.io,haoyuchen1992/osf.io,GageGaskins/osf.io,aaxelb/osf.io,fabianvf/osf.io,hmoco/osf.io,caseyrollins/osf.io,brandonPurvis/osf.io,mluo613/osf.io,danielneis/osf.io,jeffreyliu3230/osf.io,bdyetton/prettychart,jmcarp/osf.io,chennan47/osf.io,sloria/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,bdyetton/prettychart,monikagrabowska/osf.io,Nesiehr/osf.io,HarryRybacki/osf.io,Ghalko/osf.io,GageGaskins/osf.io,aaxelb/osf.io,RomanZWang/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,erinspace/osf.io,emetsger/osf.io,jinluyuan/osf.io,caseyrygt/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,kwierman/osf.io,mluke93/osf.io,zamattiac/osf.io,acshi/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,reinaH/osf.io,KAsante95/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,samchrisinger/osf.io,cldershem/osf.io,Ghalko/osf.io,caneruguz/osf.io,baylee-d/osf.io,crcresearch/osf.io,binoculars/osf.io,zachjanicki/osf.io,KAsante95/osf.io,barbour-em/osf.io,amyshi188/osf.io,arpitar/osf.io,DanielSBrown/osf.io,dplorimer/osf,ckc6cz/osf.io,jeffreyliu3230/osf.io,ticklemepierce/osf.io,aaxelb/osf.io,Nesiehr/osf.io,mluke93/osf.io,brianjgeiger/osf.io,sbt9uc/osf.io,ZobairAlijan/osf.io,mattclark/osf.io,jolene-esposito/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,abought/osf.io,caseyrollins/osf.io,DanielSBrown/osf.io,dplorimer/osf,baylee-d/osf.io,jnayak1/osf.io,ticklemepierce/osf.io,billyhunt/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,dplorimer/osf,saradbowman/osf.io,hmoco/osf.io | ---
+++
@@ -1,5 +1,5 @@
from django.conf import settings
-from django.conf.urls import include, url
+from django.conf.urls import include, url, patterns
# from django.contrib import admin
from django.conf.urls.static import static
@@ -9,8 +9,9 @@
urlpatterns = [
### API ###
- url(r'^$', views.root),
- url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
- url(r'^users/', include('api.users.urls', namespace='users')),
- url(r'^docs/', include('rest_framework_swagger.urls')),
-] + static('/static/', document_root=settings.STATIC_ROOT)
+ url(r'^v2/', include(patterns('',
+ url(r'^$', views.root),
+ url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
+ url(r'^users/', include('api.users.urls', namespace='users')),
+ url(r'^docs/', include('rest_framework_swagger.urls')),
+ )))] + static('/static/', document_root=settings.STATIC_ROOT) |
f70574c38140c9a5493981f5baf72bab82be8c60 | opps/articles/tests/models.py | opps/articles/tests/models.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from opps.articles.models import Post
class PostModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def setUP(self):
self.post = Post.objects.get(id=1)
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
self.assertTrue(post[0], self.post)
self.assertEqual(len(post), 1)
self.assertEqual(post[0].slug, u'test-post-application')
self.assertEqual(post[0].title, u'test post application')
self.assertTrue(post[0].short_url)
def test_child_class(self):
self.assertTrue(self.post.child_class)
self.assertEqual(self.post.child_class, 'Post')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from opps.articles.models import Post
class PostModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def setUp(self):
self.post = Post.objects.get(id=1)
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
self.assertTrue(post[0], self.post)
self.assertEqual(len(post), 1)
self.assertEqual(post[0].slug, u'test-post-application')
self.assertEqual(post[0].title, u'test post application')
self.assertTrue(post[0].short_url)
def test_child_class(self):
self.assertTrue(self.post.child_class)
self.assertEqual(self.post.child_class, 'Post')
def test_get_absolute_url(self):
self.assertEqual(self.post.get_absolute_url(),
u'/channel-01/test-post-application')
self.assertEqual(self.post.get_absolute_url(),
"/{0}/{1}".format(self.post.channel.long_slug,
self.post.slug))
| Add test articles, post get absolute url | Add test articles, post get absolute url
| Python | mit | YACOWS/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,jeanmask/opps,opps/opps,williamroot/opps | ---
+++
@@ -9,7 +9,7 @@
fixtures = ['tests/initial_data.json']
- def setUP(self):
+ def setUp(self):
self.post = Post.objects.get(id=1)
def test_basic_post_exist(self):
@@ -26,3 +26,11 @@
self.assertTrue(self.post.child_class)
self.assertEqual(self.post.child_class, 'Post')
+
+ def test_get_absolute_url(self):
+
+ self.assertEqual(self.post.get_absolute_url(),
+ u'/channel-01/test-post-application')
+ self.assertEqual(self.post.get_absolute_url(),
+ "/{0}/{1}".format(self.post.channel.long_slug,
+ self.post.slug)) |
3eee55236a709e2929ffab7f15b8e50d541ed9a7 | utils/graph.py | utils/graph.py | """
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='red'):
plt.scatter(x, y, color=color)
def scale_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset, ymin=0)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
if output == 'stdout':
plt.savefig(sys.stdout, format='png')
else:
plt.savefig(output)
else:
plt.show()
| """
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset, ymin=0)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
if output == 'stdout':
plt.savefig(sys.stdout, format='png')
else:
plt.savefig(output)
else:
plt.show()
| Add a different color for scatter plots to differentiate from line. | Add a different color for scatter plots to differentiate from line.
| Python | mit | wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation | ---
+++
@@ -24,7 +24,7 @@
def legend(*args):
plt.legend(args, loc='best')
-def scatter_plot(x, y, color='red'):
+def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_plot(max_x, stepsize): |
dcc810f3181ebe358481c30c2248d25511aab26c | npz_to_my5c.py | npz_to_my5c.py | import numpy as np
import argparse
import sys
import pandas as pd
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('-n', '--npz_frequencies_file', required=True, help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
def open_npz(fp):
handle = np.load(fp)
return handle['windows'], handle['scores']
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t')
| import numpy as np
import argparse
import sys
import pandas as pd
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('-n', '--npz_frequencies_file', required=True, help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
def open_npz(fp):
handle = np.load(fp)
return handle['windows'], handle['scores']
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t', na_rep="NaN")
| Handle NaNs properly when flattening matrices. | Handle NaNs properly when flattening matrices.
| Python | apache-2.0 | pombo-lab/gamtools,pombo-lab/gamtools | ---
+++
@@ -14,4 +14,4 @@
windows, data = open_npz(args.npz_frequencies_file)
names = [ '{}:{}-{}'.format(*i) for i in windows ]
-pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t')
+pd.DataFrame(data, index=names, columns=names).to_csv(sys.stdout, sep='\t', na_rep="NaN") |
8ad95ada5e57ad941b1333cea8f8b81ce739a245 | knights/defaultfilters.py | knights/defaultfilters.py |
from .library import Library
from .filters import Filter
register = Library()
@register.filter(name='title')
class TitleFilter(Filter):
def __rshift__(self, other):
return str(other).title()
|
from .library import Library
register = Library()
@register.filter
def title(val):
return str(val).title()
| Convert to new style filter | Convert to new style filter
| Python | mit | funkybob/knights-templater,funkybob/knights-templater | ---
+++
@@ -1,12 +1,10 @@
from .library import Library
-from .filters import Filter
register = Library()
-@register.filter(name='title')
-class TitleFilter(Filter):
- def __rshift__(self, other):
- return str(other).title()
+@register.filter
+def title(val):
+ return str(val).title() |
0512534c4067b6c36d68241d1ccc7de349a3bbe8 | betfairlightweight/__init__.py | betfairlightweight/__init__.py | from .apiclient import APIClient
from .exceptions import BetfairError
from .streaming import StreamListener
from .filters import MarketFilter, StreamingMarketFilter, StreamingMarketDataFilter
__title__ = 'betfairlightweight'
__version__ = '0.9.9'
__author__ = 'Liam Pauling'
| import logging
from .apiclient import APIClient
from .exceptions import BetfairError
from .filters import MarketFilter, StreamingMarketFilter, StreamingMarketDataFilter
from .streaming import StreamListener
__title__ = 'betfairlightweight'
__version__ = '0.9.9'
__author__ = 'Liam Pauling'
# Set default logging handler to avoid "No handler found" warnings.
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
| Add NullHandler to top level package logger | Add NullHandler to top level package logger
| Python | mit | liampauling/betfair,liampauling/betfairlightweight | ---
+++
@@ -1,9 +1,21 @@
+import logging
+
from .apiclient import APIClient
from .exceptions import BetfairError
+from .filters import MarketFilter, StreamingMarketFilter, StreamingMarketDataFilter
from .streaming import StreamListener
-from .filters import MarketFilter, StreamingMarketFilter, StreamingMarketDataFilter
__title__ = 'betfairlightweight'
__version__ = '0.9.9'
__author__ = 'Liam Pauling'
+
+# Set default logging handler to avoid "No handler found" warnings.
+try: # Python 2.7+
+ from logging import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+logging.getLogger(__name__).addHandler(NullHandler()) |
9be232ab83a4c482eaf56ea99f7b1be81412c517 | Bookie/fabfile/development.py | Bookie/fabfile/development.py | """Fabric commands useful for working on developing Bookie are loaded here"""
from fabric.api import hosts
from fabric.api import local
from fabric.contrib.project import rsync_project
bootstrap_host = 'ubuntu@bmark'
bootstrap_server = '/var/www/bootstrap.py'
bootstrap_local = 'scripts/bootstrap/bootstrap.py'
def gen_bootstrap():
"""Run the generator that builds a custom virtualenv bootstrap file"""
local('python scripts/bootstrap/gen_bootstrap.py > scripts/bootstrap/bootstrap.py', capture=False)
@hosts(bootstrap_host)
def push_bootstrap():
"""Sync the bootstrap.py up to the server for download"""
rsync_project(bootstrap_server, bootstrap_local)
| """Fabric commands useful for working on developing Bookie are loaded here"""
import os
from fabric.api import hosts
from fabric.api import local
from fabric.contrib.project import rsync_project
bootstrap_host = 'ubuntu@bmark'
bootstrap_server = '/var/www/bootstrap.py'
bootstrap_local = 'scripts/bootstrap/bootstrap.py'
def gen_bootstrap():
"""Run the generator that builds a custom virtualenv bootstrap file"""
local('python scripts/bootstrap/gen_bootstrap.py > scripts/bootstrap/bootstrap.py', capture=False)
@hosts(bootstrap_host)
def push_bootstrap():
"""Sync the bootstrap.py up to the server for download"""
rsync_project(bootstrap_server, bootstrap_local)
def jstest():
"""Launch the JS tests we have in the system
Currently only the ones there are for extensions
"""
cwd = os.path.dirname(os.path.dirname(__file__))
local('google-chrome {0}/extensions/tests/index.html'.format(cwd))
| Add a fab command to run jstests | Add a fab command to run jstests
| Python | agpl-3.0 | adamlincoln/Bookie,skmezanul/Bookie,charany1/Bookie,charany1/Bookie,adamlincoln/Bookie,bookieio/Bookie,wangjun/Bookie,teodesson/Bookie,GreenLunar/Bookie,wangjun/Bookie,teodesson/Bookie,GreenLunar/Bookie,pombredanne/Bookie,pombredanne/Bookie,charany1/Bookie,GreenLunar/Bookie,skmezanul/Bookie,teodesson/Bookie,wangjun/Bookie,teodesson/Bookie,bookieio/Bookie,adamlincoln/Bookie,adamlincoln/Bookie,bookieio/Bookie,bookieio/Bookie,GreenLunar/Bookie,skmezanul/Bookie,pombredanne/Bookie,skmezanul/Bookie,wangjun/Bookie | ---
+++
@@ -1,4 +1,6 @@
"""Fabric commands useful for working on developing Bookie are loaded here"""
+import os
+
from fabric.api import hosts
from fabric.api import local
from fabric.contrib.project import rsync_project
@@ -17,3 +19,12 @@
def push_bootstrap():
"""Sync the bootstrap.py up to the server for download"""
rsync_project(bootstrap_server, bootstrap_local)
+
+def jstest():
+ """Launch the JS tests we have in the system
+
+ Currently only the ones there are for extensions
+
+ """
+ cwd = os.path.dirname(os.path.dirname(__file__))
+ local('google-chrome {0}/extensions/tests/index.html'.format(cwd)) |
7197f1578335b38eb2037e8d82f15a27d786d5c1 | var/spack/repos/builtin/packages/py-setuptools/package.py | var/spack/repos/builtin/packages/py-setuptools/package.py | from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
| from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
| Add version 2.6.7 of py-setuptools | Add version 2.6.7 of py-setuptools
| Python | lgpl-2.1 | skosukhin/spack,mfherbst/spack,tmerrick1/spack,lgarren/spack,skosukhin/spack,krafczyk/spack,iulian787/spack,skosukhin/spack,tmerrick1/spack,skosukhin/spack,matthiasdiener/spack,matthiasdiener/spack,TheTimmy/spack,LLNL/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,mfherbst/spack,matthiasdiener/spack,mfherbst/spack,EmreAtes/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,tmerrick1/spack,EmreAtes/spack,skosukhin/spack,matthiasdiener/spack,iulian787/spack,iulian787/spack,lgarren/spack,EmreAtes/spack,TheTimmy/spack,tmerrick1/spack,EmreAtes/spack,iulian787/spack,LLNL/spack,lgarren/spack,LLNL/spack,EmreAtes/spack,LLNL/spack,TheTimmy/spack,krafczyk/spack,mfherbst/spack,lgarren/spack,tmerrick1/spack,lgarren/spack,TheTimmy/spack,krafczyk/spack,TheTimmy/spack | ---
+++
@@ -10,6 +10,7 @@
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
+ version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
extends('python')
|
90e7bc2c8313de2a5054d5290441c527f5f2c253 | gameButton.py | gameButton.py | # Game Button class for menu
# Marshall Ehlinger
import pygame
class gameButton:
GRAY = [131, 131, 131]
PINK = [255, 55, 135]
def __init__(self, label, buttonWidth, buttonHeight, importedGameFunction):
self.label = label
self.height = buttonHeight
self.width = buttonWidth
self.importedGameFunction = importedGameFunction
def renderButton(self, surface, isSelected, origin_x, origin_y):
if isSelected:
# pygame.draw.rect(surface, self.PINK, [origin_x, origin_y, self.width, self.height])
surface.fill(self.PINK,[origin_x, origin_y, self.width, self.height])
else:
# pygame.draw.rect(surface, self.GRAY, [origin_x, origin_y, self.width, self.height])
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
def runGame(self):
self.importedGameFunction()
| # Game Button class for menu
# Marshall Ehlinger
import pygame
class gameButton:
GRAY = [131, 131, 131]
PINK = [255, 55, 135]
WHITE = [255, 255, 255]
BLACK = [0, 0, 0]
def __init__(self, label, buttonWidth, buttonHeight, importedGameFunction):
self.label = label
self.height = buttonHeight
self.width = buttonWidth
self.importedGameFunction = importedGameFunction
self.font = pygame.font.SysFont("monospace", 15)
def renderButton(self, surface, isSelected, origin_x, origin_y):
label = self.font.render(self.label, True, self.BLACK)
if isSelected:
# pygame.draw.rect(surface, self.PINK, [origin_x, origin_y, self.width, self.height])
surface.fill(self.PINK,[origin_x, origin_y, self.width, self.height])
else:
# pygame.draw.rect(surface, self.GRAY, [origin_x, origin_y, self.width, self.height])
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
surface.blit(label,[origin_x + 5, origin_y + (.3 * self.height)])
def runGame(self):
self.importedGameFunction()
| Add labels to menu buttons | Add labels to menu buttons
| Python | mit | MEhlinger/rpi_pushbutton_games | ---
+++
@@ -7,19 +7,24 @@
GRAY = [131, 131, 131]
PINK = [255, 55, 135]
+ WHITE = [255, 255, 255]
+ BLACK = [0, 0, 0]
def __init__(self, label, buttonWidth, buttonHeight, importedGameFunction):
self.label = label
self.height = buttonHeight
self.width = buttonWidth
self.importedGameFunction = importedGameFunction
+ self.font = pygame.font.SysFont("monospace", 15)
def renderButton(self, surface, isSelected, origin_x, origin_y):
+ label = self.font.render(self.label, True, self.BLACK)
if isSelected:
# pygame.draw.rect(surface, self.PINK, [origin_x, origin_y, self.width, self.height])
surface.fill(self.PINK,[origin_x, origin_y, self.width, self.height])
else:
# pygame.draw.rect(surface, self.GRAY, [origin_x, origin_y, self.width, self.height])
surface.fill(self.GRAY,[origin_x, origin_y, self.width, self.height])
+ surface.blit(label,[origin_x + 5, origin_y + (.3 * self.height)])
def runGame(self):
self.importedGameFunction()
|
761367713658e2a436e1d600af026b375a7a332b | pymks/bases/real_ffts.py | pymks/bases/real_ffts.py | from .abstract import _AbstractMicrostructureBasis
import numpy as np
class _RealFFTBasis(_AbstractMicrostructureBasis):
def __init__(self, *args, **kwargs):
super(_RealFFTBasis, self).__init__(*args, **kwargs)
def _fftn(self, X, threads=1, avoid_copy=True):
if self._pyfftw:
return self._fftmodule.rfftn(np.ascontiguousarray(X),
axes=self._axes,
threads=threads,
planner_effort='FFTW_ESTIMATE',
overwrite_input=True,
avoid_copy=avoid_copy)()
else:
return self._fftmodule.rfftn(X, axes=self._axes)
def _ifftn(self, X, s=None, threads=1, avoid_copy=True):
if self._pyfftw:
return self._fftmodule.irfftn(np.ascontiguousarray(X), s=s,
axes=self._axes,
threads=threads,
planner_effort='FFTW_ESTIMATE',
avoid_copy=avoid_copy)().real
else:
return self._fftmodule.irfftn(X, axes=self._axes).real
def discretize(self, X):
raise NotImplementedError
| from .abstract import _AbstractMicrostructureBasis
import numpy as np
class _RealFFTBasis(_AbstractMicrostructureBasis):
def __init__(self, *args, **kwargs):
super(_RealFFTBasis, self).__init__(*args, **kwargs)
def _fftn(self, X, threads=1, avoid_copy=True):
if self._pyfftw:
return self._fftmodule.rfftn(np.ascontiguousarray(X),
axes=self._axes,
threads=threads,
planner_effort='FFTW_ESTIMATE',
overwrite_input=True,
avoid_copy=avoid_copy)()
else:
return self._fftmodule.rfftn(X, axes=self._axes)
def _ifftn(self, X, s=None, threads=1, avoid_copy=True):
if self._pyfftw:
return self._fftmodule.irfftn(np.ascontiguousarray(X), s=s,
axes=self._axes,
threads=threads,
planner_effort='FFTW_ESTIMATE',
avoid_copy=avoid_copy)().real
else:
return self._fftmodule.irfftn(X, axes=self._axes, s=s).real
def discretize(self, X):
raise NotImplementedError
| Fix bug for numpy's irfftn | Fix bug for numpy's irfftn
address #232
Fix bug for numpy's irfftn. The size of the returned array must be
passed because the returned size is potentially not unique. Without
this change only the return kernel would possibly have the wrong shape.
| Python | mit | davidbrough1/pymks,davidbrough1/pymks | ---
+++
@@ -25,7 +25,7 @@
planner_effort='FFTW_ESTIMATE',
avoid_copy=avoid_copy)().real
else:
- return self._fftmodule.irfftn(X, axes=self._axes).real
+ return self._fftmodule.irfftn(X, axes=self._axes, s=s).real
def discretize(self, X):
raise NotImplementedError |
3c6c242fd42bd9acf9866f458fa70536d56f3ccd | tests/test_tabulate.py | tests/test_tabulate.py | from pgcli.packages.tabulate import tabulate
from textwrap import dedent
def test_dont_strip_leading_whitespace():
data = [[' abc']]
headers = ['xyz']
tbl, _ = tabulate(data, headers, tablefmt='psql')
assert tbl == dedent('''
+---------+
| xyz |
|---------|
| abc |
+---------+ ''').strip()
| from mycli.packages.tabulate import tabulate
from textwrap import dedent
def test_dont_strip_leading_whitespace():
data = [[' abc']]
headers = ['xyz']
tbl, _ = tabulate(data, headers, tablefmt='psql')
assert tbl == dedent('''
+---------+
| xyz |
|---------|
| abc |
+---------+ ''').strip()
| Change the pgcli import to mycli. | Change the pgcli import to mycli.
| Python | bsd-3-clause | mdsrosa/mycli,mdsrosa/mycli | ---
+++
@@ -1,4 +1,4 @@
-from pgcli.packages.tabulate import tabulate
+from mycli.packages.tabulate import tabulate
from textwrap import dedent
|
633b23fd862f152c3f7d9e88fbeb660635386c3f | qmtp_package/__init__.py | qmtp_package/__init__.py | import os
if not os.environ.get("RMG_workingDirectory"):
import os.path
message = "Please set your RMG_workingDirectory environment variable.\n" +\
"(eg. export RMG_workingDirectory=%s )" % \
os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
raise Exception(message)
| Check for RMG_workingDirectory environment variable in qmtp_package | Check for RMG_workingDirectory environment variable in qmtp_package
I dislike the way this is needed, but for now this commit will at least help people
discover their mistake if they forget. | Python | mit | nickvandewiele/RMG-Py,pierrelb/RMG-Py,KEHANG/RMG-Py,chatelak/RMG-Py,enochd/RMG-Py,comocheng/RMG-Py,nickvandewiele/RMG-Py,nyee/RMG-Py,nyee/RMG-Py,comocheng/RMG-Py,enochd/RMG-Py,pierrelb/RMG-Py,chatelak/RMG-Py,faribas/RMG-Py,KEHANG/RMG-Py,faribas/RMG-Py | ---
+++
@@ -0,0 +1,7 @@
+import os
+if not os.environ.get("RMG_workingDirectory"):
+ import os.path
+ message = "Please set your RMG_workingDirectory environment variable.\n" +\
+ "(eg. export RMG_workingDirectory=%s )" % \
+ os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
+ raise Exception(message) | |
f791354a098c32617f02f05dbbb53861b7a94139 | rapt/cmds/ingredients.py | rapt/cmds/ingredients.py | import click
from rapt.connection import get_vr
from rapt.models import query
from rapt.util import dump_yaml, load_yaml, edit_yaml
@click.command()
@click.option('--name', '-n')
def ingredients(name, verbose):
"""List builds.
"""
vr = get_vr()
q = {}
if name:
q['name'] = name
# add filters if we need to be...
for i, ingredient in enumerate(query('Ingredient', vr, q)):
click.echo(ingredient.name)
@click.command()
@click.argument('name')
def ingredient(name):
"""View a complete ingredient config."""
vr = get_vr()
q = {'name': name}
ingredient = query('Ingredient', vr, q).next()
doc = {
'config': load_yaml(ingredient.config_yaml),
'env': load_yaml(ingredient.env_yaml),
}
config = edit_yaml(dump_yaml(doc))
print(config)
| import click
from rapt.connection import get_vr
from rapt.models import query
from rapt.util import dump_yaml, load_yaml, edit_yaml
@click.command()
@click.option('--name', '-n')
def ingredients(name, verbose):
"""List builds.
"""
vr = get_vr()
q = {}
if name:
q['name'] = name
# add filters if we need to be...
for i, ingredient in enumerate(query('Ingredient', vr, q)):
click.echo(ingredient.name)
@click.command()
@click.argument('name')
def ingredient(name):
"""View a complete ingredient config."""
vr = get_vr()
q = {'name': name}
ingredient = query('Ingredient', vr, q).next()
doc = {
'config': load_yaml(ingredient.config_yaml),
'env': load_yaml(ingredient.env_yaml),
}
config = edit_yaml(dump_yaml(doc))
if not config:
click.echo('No changes')
return
ingredient.config_yaml = dump_yaml(config['config'])
ingredient.env_yaml = dump_yaml(config['env'])
ingredient.save()
| Update the ingredient or noop if there are no changes | Update the ingredient or noop if there are no changes
| Python | bsd-3-clause | yougov/rapt,yougov/rapt | ---
+++
@@ -35,4 +35,11 @@
}
config = edit_yaml(dump_yaml(doc))
- print(config)
+
+ if not config:
+ click.echo('No changes')
+ return
+
+ ingredient.config_yaml = dump_yaml(config['config'])
+ ingredient.env_yaml = dump_yaml(config['env'])
+ ingredient.save() |
903458640ec8db1c39c822b229e466bc717efe40 | registration/__init__.py | registration/__init__.py | from django.utils.version import get_version as django_get_version
VERSION = (0, 9, 0, 'beta', 1)
def get_version():
return django_get_version(VERSION) # pragma: no cover
| VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
| Move import of Django's get_version into django-registration's get_version, to avoid dependency-order problems. | Move import of Django's get_version into django-registration's get_version, to avoid dependency-order problems.
| Python | bsd-3-clause | christang/django-registration-1.5,AndrewLvov/django-registration,AndrewLvov/django-registration,fedenko/django-registration,fedenko/django-registration,christang/django-registration-1.5 | ---
+++
@@ -1,8 +1,6 @@
-from django.utils.version import get_version as django_get_version
-
-
VERSION = (0, 9, 0, 'beta', 1)
def get_version():
+ from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover |
c763d42e48d501461ab6c8c875d691f52045ada8 | intelmq/bots/outputs/mongodb/output.py | intelmq/bots/outputs/mongodb/output.py | # -*- coding: utf-8 -*-
"""
pymongo library automatically tries to reconnect if connection has been lost
"""
from intelmq.lib.bot import Bot
try:
import pymongo
except ImportError:
pymongo = None
class MongoDBOutputBot(Bot):
def init(self):
if pymongo is None:
self.logger.error('Could not import pymongo. Please install it.')
self.stop()
self.connect()
def connect(self):
self.logger.debug('Connecting to mongodb server.')
try:
self.client = pymongo.MongoClient(self.parameters.host,
int(self.parameters.port))
except pymongo.errors.ConnectionFailure:
raise ValueError('Connection to mongodb server failed.')
else:
db = self.client[self.parameters.database]
self.collection = db[self.parameters.collection]
self.logger.info('Successfully connected to mongodb server.')
def process(self):
event = self.receive_message()
try:
self.collection.insert(event.to_dict(hierarchical=self.parameters.hierarchical_output))
except pymongo.errors.AutoReconnect:
self.logger.error('Connection Lost. Connecting again.')
self.connect()
else:
self.acknowledge_message()
def shutdown(self):
self.client.close()
BOT = MongoDBOutputBot
| # -*- coding: utf-8 -*-
"""
pymongo library automatically tries to reconnect if connection has been lost
"""
from intelmq.lib.bot import Bot
try:
import pymongo
except ImportError:
pymongo = None
class MongoDBOutputBot(Bot):
def init(self):
if pymongo is None:
self.logger.error('Could not import pymongo. Please install it.')
self.stop()
self.connect()
def connect(self):
self.logger.debug('Connecting to mongodb server.')
try:
self.client = pymongo.MongoClient(self.parameters.host,
int(self.parameters.port))
except pymongo.errors.ConnectionFailure:
raise ValueError('Connection to mongodb server failed.')
else:
db = self.client[self.parameters.database]
if self.parameters.db_user and self.parameters.db_pass:
try:
db.authenticate(name=self.parameters.db_user,
password=self.parameters.db_pass)
except pymongo.errors.OperationFailure:
raise ValueError('Authentication to {} failed'.format(self.parameters.database))
self.collection = db[self.parameters.collection]
self.logger.info('Successfully connected to mongodb server.')
def process(self):
event = self.receive_message()
try:
self.collection.insert(event.to_dict(hierarchical=self.parameters.hierarchical_output))
except pymongo.errors.AutoReconnect:
self.logger.error('Connection Lost. Connecting again.')
self.connect()
else:
self.acknowledge_message()
def shutdown(self):
self.client.close()
BOT = MongoDBOutputBot
| Add authentication otpion to mongodb | Add authentication otpion to mongodb
| Python | agpl-3.0 | certtools/intelmq,certtools/intelmq,certtools/intelmq,aaronkaplan/intelmq,aaronkaplan/intelmq,aaronkaplan/intelmq | ---
+++
@@ -29,6 +29,12 @@
raise ValueError('Connection to mongodb server failed.')
else:
db = self.client[self.parameters.database]
+ if self.parameters.db_user and self.parameters.db_pass:
+ try:
+ db.authenticate(name=self.parameters.db_user,
+ password=self.parameters.db_pass)
+ except pymongo.errors.OperationFailure:
+ raise ValueError('Authentication to {} failed'.format(self.parameters.database))
self.collection = db[self.parameters.collection]
self.logger.info('Successfully connected to mongodb server.')
|
afe90ba2a9720ffd80780e7696353510501362c7 | studygroups/management/commands/generate_reminders.py | studygroups/management/commands/generate_reminders.py | from django.core.management.base import BaseCommand, CommandError
from studygroups.tasks import gen_reminders
class Command(BaseCommand):
help = 'Generate reminders for all study groups happening in 3 days from now'
def handle(self, *args, **options):
gen_reminders()
| from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from studygroups.models import Meeting
from studygroups.models.learningcircle import generate_meeting_reminder
class Command(BaseCommand):
help = 'Transitional command to generate reminders for all meetings in the future.'
def handle(self, *args, **options):
today = timezone.now().date()
meetings = Meeting.objects.active().filter(study_group__deleted_at__isnull=True).filter(meeting_date__gte=today)
for meeting in meetings:
print(f'Generating meeting reminder for meeting happening {meeting.meeting_date}')
generate_meeting_reminder(meeting)
| Update task to generate reminders for all future meetings | Update task to generate reminders for all future meetings
| Python | mit | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles | ---
+++
@@ -1,9 +1,15 @@
from django.core.management.base import BaseCommand, CommandError
+from django.utils import timezone
-from studygroups.tasks import gen_reminders
+from studygroups.models import Meeting
+from studygroups.models.learningcircle import generate_meeting_reminder
class Command(BaseCommand):
- help = 'Generate reminders for all study groups happening in 3 days from now'
+ help = 'Transitional command to generate reminders for all meetings in the future.'
def handle(self, *args, **options):
- gen_reminders()
+ today = timezone.now().date()
+ meetings = Meeting.objects.active().filter(study_group__deleted_at__isnull=True).filter(meeting_date__gte=today)
+ for meeting in meetings:
+ print(f'Generating meeting reminder for meeting happening {meeting.meeting_date}')
+ generate_meeting_reminder(meeting) |
52a9e0b5f3f0df4d2a9a092ecf6935def7a3e5cf | lib/ansiblelint/formatters/__init__.py | lib/ansiblelint/formatters/__init__.py | class Formatter(object):
def format(self, match):
formatstr = u"[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter(object):
def format(self, match):
formatstr = u"[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter(object):
def format(self, match):
formatstr = u"{0}:{1}: [{2}] {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
| class Formatter(object):
def format(self, match):
formatstr = u"[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter(object):
def format(self, match):
formatstr = u"[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter(object):
def format(self, match):
formatstr = u"{0}:{1}: [{2}] {3}"
return formatstr.format(match.filename,
match.linenumber,
"E" + match.rule.id,
match.message,
)
| Improve ParseableFormatter to be more like pylint | Improve ParseableFormatter to be more like pylint
Add an E in front of the rule ID so that pylint detects
it as an error.
Fixes #154
| Python | mit | willthames/ansible-lint,dataxu/ansible-lint,MatrixCrawler/ansible-lint | ---
+++
@@ -23,6 +23,6 @@
formatstr = u"{0}:{1}: [{2}] {3}"
return formatstr.format(match.filename,
match.linenumber,
- match.rule.id,
+ "E" + match.rule.id,
match.message,
) |
d1d55450db13766f51f264c9bfef1bcea74ef7b1 | convert.py | convert.py | #!/usr/bin/env python
import os, sys
import pexpect
import geom
from sfepy.fem.mesh import Mesh
try:
from site_cfg import tetgen_path
except ImportError:
tetgen_path = '/usr/bin/tetgen'
def mesh():
if len( sys.argv ) == 3:
geomFileName = sys.argv[1]
vtkFileName = sys.argv[2]
if len( sys.argv ) == 2:
geomFileName = sys.argv[1]
vtkFileName = "tmp/t.1.vtk"
else:
geomFileName = "database/box.geo"
vtkFileName = "tmp/t.1.vtk"
pexpect.run( "gmsh -0 %s -o tmp/x.geo" % geomFileName )
g=geom.read_gmsh("tmp/x.geo")
g.printinfo()
geom.write_tetgen(g,"tmp/t.poly")
geom.runtetgen("tmp/t.poly",a=0.0003,Q=1.0,quadratic=False,
tetgenpath = tetgen_path)
m = Mesh.fromFile("tmp/t.1.node")
m.write( vtkFileName, io = "auto" )
try:
os.makedirs( "tmp" )
except OSError, e:
if e.errno != 17: # [Errno 17] File exists
raise
mesh()
| #!/usr/bin/env python
import os, sys
import geom
from sfepy.fem.mesh import Mesh
try:
from site_cfg import tetgen_path
except ImportError:
tetgen_path = '/usr/bin/tetgen'
def mesh():
if len( sys.argv ) == 3:
geomFileName = sys.argv[1]
vtkFileName = sys.argv[2]
if len( sys.argv ) == 2:
geomFileName = sys.argv[1]
vtkFileName = "tmp/t.1.vtk"
else:
geomFileName = "database/box.geo"
vtkFileName = "tmp/t.1.vtk"
os.system( "gmsh -0 %s -o tmp/x.geo" % geomFileName )
g=geom.read_gmsh("tmp/x.geo")
g.printinfo()
geom.write_tetgen(g,"tmp/t.poly")
geom.runtetgen("tmp/t.poly",a=0.03,Q=1.0,quadratic=False,
tetgenpath = tetgen_path)
m = Mesh.fromFile("tmp/t.1.node")
m.write( vtkFileName, io = "auto" )
try:
os.makedirs( "tmp" )
except OSError, e:
if e.errno != 17: # [Errno 17] File exists
raise
mesh()
| Use os.system() instead of pexpect.run(). | Use os.system() instead of pexpect.run().
Also do not generate too dense mesh by default, so that it's faster.
| Python | bsd-3-clause | BubuLK/sfepy,sfepy/sfepy,BubuLK/sfepy,RexFuzzle/sfepy,vlukes/sfepy,sfepy/sfepy,rc/sfepy,lokik/sfepy,RexFuzzle/sfepy,BubuLK/sfepy,olivierverdier/sfepy,sfepy/sfepy,olivierverdier/sfepy,vlukes/sfepy,lokik/sfepy,vlukes/sfepy,RexFuzzle/sfepy,RexFuzzle/sfepy,olivierverdier/sfepy,lokik/sfepy,lokik/sfepy,rc/sfepy,rc/sfepy | ---
+++
@@ -1,6 +1,5 @@
#!/usr/bin/env python
import os, sys
-import pexpect
import geom
from sfepy.fem.mesh import Mesh
@@ -20,11 +19,11 @@
geomFileName = "database/box.geo"
vtkFileName = "tmp/t.1.vtk"
- pexpect.run( "gmsh -0 %s -o tmp/x.geo" % geomFileName )
+ os.system( "gmsh -0 %s -o tmp/x.geo" % geomFileName )
g=geom.read_gmsh("tmp/x.geo")
g.printinfo()
geom.write_tetgen(g,"tmp/t.poly")
- geom.runtetgen("tmp/t.poly",a=0.0003,Q=1.0,quadratic=False,
+ geom.runtetgen("tmp/t.poly",a=0.03,Q=1.0,quadratic=False,
tetgenpath = tetgen_path)
m = Mesh.fromFile("tmp/t.1.node") |
d178fb001b8b6869038ed6ec288acf5fb427205c | rssmailer/tasks/mail.py | rssmailer/tasks/mail.py | from celery.decorators import task
from django.core.mail import send_mail
from ..models import Email
@task(ignore_result=True, name="rssmailer.tasks.mail.send")
def send(entry, **kwargs):
logger = send.get_logger(**kwargs)
logger.info("Sending entry: %s" % entry.title)
emails_all = Email.objects.all()
step = 3 # how many recipients in one e-mail
for i in range(0, len(emails_all), step):
recipients = map(lambda e: e.email, emails_all[i:i+step])
send_entry_to.delay(entry.title, entry.summary, recipients)
@task(ignore_result=True, name="rssmailer.tasks.mail.send_entry_to")
def send_entry_to(title, body, recipients, **kwargs):
logger = send.get_logger(**kwargs)
logger.info("Sending to: %s" % ','.join(recipients))
send_mail(title, body, "rssmailer@praus.net", recipients)
| from celery.decorators import task
from django.core.mail import send_mail
from ..models import Email
@task(ignore_result=True, name="rssmailer.tasks.send")
def send(entry, **kwargs):
logger = send.get_logger(**kwargs)
logger.info("Sending entry: %s" % entry.title)
emails_all = Email.objects.all()
step = 3 # how many recipients in one e-mail
for i in range(0, len(emails_all), step):
recipients = map(lambda e: e.email, emails_all[i:i+step])
send_entry_to.delay(entry.title, entry.summary, recipients)
@task(ignore_result=True, name="rssmailer.tasks.send_entry_to")
def send_entry_to(title, body, recipients, **kwargs):
logger = send.get_logger(**kwargs)
logger.info("Sending to: %s" % ','.join(recipients))
send_mail(title, body, "rssmailer@praus.net", recipients) | Fix naming issues with tasks | Fix naming issues with tasks
| Python | bsd-3-clause | praus/django-rssmailer | ---
+++
@@ -3,7 +3,7 @@
from ..models import Email
-@task(ignore_result=True, name="rssmailer.tasks.mail.send")
+@task(ignore_result=True, name="rssmailer.tasks.send")
def send(entry, **kwargs):
logger = send.get_logger(**kwargs)
logger.info("Sending entry: %s" % entry.title)
@@ -16,8 +16,8 @@
send_entry_to.delay(entry.title, entry.summary, recipients)
-@task(ignore_result=True, name="rssmailer.tasks.mail.send_entry_to")
+@task(ignore_result=True, name="rssmailer.tasks.send_entry_to")
def send_entry_to(title, body, recipients, **kwargs):
logger = send.get_logger(**kwargs)
logger.info("Sending to: %s" % ','.join(recipients))
- send_mail(title, body, "rssmailer@praus.net", recipients)
+ send_mail(title, body, "rssmailer@praus.net", recipients) |
615627cf6ea4725bed7886e822bc01c12d9fdead | nodewatcher/web/sanitize-dump.py | nodewatcher/web/sanitize-dump.py | #!/usr/bin/python
# Setup import paths, since we are using Django models
import sys, os
sys.path.append('/var/www/django')
os.environ['DJANGO_SETTINGS_MODULE'] = 'wlanlj.settings_production'
# Imports
from django.core import serializers
if len(sys.argv) != 4:
print "Usage: %s format input-file output-file" % sys.argv[0]
exit(1)
if sys.argv[1] not in ('json', 'xml'):
print "Invalid format '%s'! Valid formats are: json xml" % sys.argv[1]
exit(1)
def object_transformator():
# Read all objects one by one
for holder in serializers.deserialize(sys.argv[1], open(sys.argv[2], 'r')):
name = holder.object.__class__.__name__
object = holder.object
# Some objects need to be sanitized
if name == 'Node':
object.notes = ''
elif name == 'UserAccount':
object.vpn_password = 'XXX'
object.phone = '5551234'
elif name == 'User':
object.password = 'XXX'
elif name == 'Profile':
object.root_pass = 'XXX'
yield holder.object
# Write transformed objects out
out = open(sys.argv[3], 'w')
serializers.serialize(sys.argv[1], object_transformator(), stream = out)
out.close()
| #!/usr/bin/python
# Setup import paths, since we are using Django models
import sys, os
sys.path.append('/var/www/django')
os.environ['DJANGO_SETTINGS_MODULE'] = 'wlanlj.settings_production'
# Imports
from django.core import serializers
if len(sys.argv) != 4:
print "Usage: %s format input-file output-file" % sys.argv[0]
exit(1)
if sys.argv[1] not in ('json', 'xml'):
print "Invalid format '%s'! Valid formats are: json xml" % sys.argv[1]
exit(1)
def object_transformator():
# Read all objects one by one
for holder in serializers.deserialize(sys.argv[1], open(sys.argv[2], 'r')):
name = holder.object.__class__.__name__
object = holder.object
# Some objects need to be sanitized
if name == 'Node':
object.notes = ''
elif name == 'UserAccount':
object.vpn_password = 'XXX'
object.phone = '5551234'
elif name == 'User':
object.password = 'XXX'
elif name == 'Profile':
object.root_pass = 'XXX'
elif name == 'StatsSolar':
continue
yield holder.object
# Write transformed objects out
out = open(sys.argv[3], 'w')
serializers.serialize(sys.argv[1], object_transformator(), stream = out)
out.close()
| Remove solar statistics data from dumps. | Remove solar statistics data from dumps.
| Python | agpl-3.0 | galaxor/Nodewatcher,galaxor/Nodewatcher,galaxor/Nodewatcher,galaxor/Nodewatcher | ---
+++
@@ -32,7 +32,9 @@
object.password = 'XXX'
elif name == 'Profile':
object.root_pass = 'XXX'
-
+ elif name == 'StatsSolar':
+ continue
+
yield holder.object
# Write transformed objects out |
8ca20bec63b8f8aaff55a7012c69a2644e292095 | mltsp/science_features/lomb_scargle_fast.py | mltsp/science_features/lomb_scargle_fast.py | import numpy as np
import gatspy
def lomb_scargle_fast_period(t, m, e):
"""Fits a simple sinuosidal model
y(t) = A sin(2*pi*w*t) + B cos(2*pi*w*t) + c
and returns the estimated period 1/w. Much faster than fitting the
full multi-frequency model used by `science_features.lomb_scargle`.
"""
opt_args = {'period_range': (2*t.max() / len(t), t.max()), 'quiet': True}
model = gatspy.periodic.LombScargleFast(fit_period=True, optimizer_kwds=opt_args)
model.fit(t, m, e)
return model.best_period
| import numpy as np
import gatspy
def lomb_scargle_fast_period(t, m, e):
"""Fits a simple sinuosidal model
y(t) = A sin(2*pi*w*t + phi) + c
and returns the estimated period 1/w. Much faster than fitting the
full multi-frequency model used by `science_features.lomb_scargle`.
"""
opt_args = {'period_range': (2*t.max() / len(t), t.max()), 'quiet': True}
model = gatspy.periodic.LombScargleFast(fit_period=True, optimizer_kwds=opt_args)
model.fit(t, m, e)
return model.best_period
| Change docstring for `period_fast` feature | Change docstring for `period_fast` feature
| Python | bsd-3-clause | bnaul/mltsp,mltsp/mltsp,mltsp/mltsp,bnaul/mltsp,acrellin/mltsp,bnaul/mltsp,acrellin/mltsp,mltsp/mltsp,mltsp/mltsp,bnaul/mltsp,acrellin/mltsp,mltsp/mltsp,mltsp/mltsp,acrellin/mltsp,bnaul/mltsp,acrellin/mltsp,bnaul/mltsp,acrellin/mltsp | ---
+++
@@ -5,7 +5,7 @@
def lomb_scargle_fast_period(t, m, e):
"""Fits a simple sinuosidal model
- y(t) = A sin(2*pi*w*t) + B cos(2*pi*w*t) + c
+ y(t) = A sin(2*pi*w*t + phi) + c
and returns the estimated period 1/w. Much faster than fitting the
full multi-frequency model used by `science_features.lomb_scargle`. |
4d5c8ec9c2006b78a42461af43944de8ab7bc9ea | us_ignite/common/sanitizer.py | us_ignite/common/sanitizer.py | import bleach
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'br',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
}
ALLOWED_STYLES = []
def sanitize(text):
"""Cleans the HTML received."""
cleaned_text = bleach.clean(
text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES,
styles=ALLOWED_STYLES, strip=True)
return cleaned_text
| import bleach
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'br',
'h3',
'h4',
'h5',
'h6',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
}
ALLOWED_STYLES = []
def sanitize(text):
"""Cleans the HTML received."""
cleaned_text = bleach.clean(
text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES,
styles=ALLOWED_STYLES, strip=True)
return cleaned_text
| Allow low level titles when sanitising. | Allow low level titles when sanitising.
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | ---
+++
@@ -15,6 +15,10 @@
'ul',
'p',
'br',
+ 'h3',
+ 'h4',
+ 'h5',
+ 'h6',
]
ALLOWED_ATTRIBUTES = { |
858bc6f152a87298f9bd3568712aed49b6e02e42 | suave/suave.py | suave/suave.py | #!/usr/bin/env python
import curses
import os
import time
from box import Box
from utils import load_yaml
def main(screen):
"""
Draws and redraws the screen.
"""
# Hide the cursor.
curses.curs_set(0)
# Load config from file.
config = load_yaml(os.path.expanduser('~/.suave/config.yml'))
# Create boxes from config.
boxes = []
for box in config:
boxes.append(
Box(
screen=screen,
rows=box['rows'],
columns=box['columns'],
rows_offset=box['rows-offset'],
columns_offset=box['columns-offset'],
command=box['command'],
interval=box['interval'],
)
)
while True:
# Redraw the screen only when it changes.
if screen.is_wintouched():
screen.clear()
screen.refresh()
# Give every box an opportunity to redraw if it has changed.
[box.redraw_if_changed() for box in boxes]
# Wait before redrawing again.
time.sleep(1)
curses.wrapper(main)
| #!/usr/bin/env python
import curses
import os
from box import Box
from utils import load_yaml
def main(screen):
"""
Draws and redraws the screen.
"""
# Hide the cursor.
curses.curs_set(0)
# Load config from file.
config = load_yaml(os.path.expanduser('~/.suave/config.yml'))
# Create boxes from config.
boxes = []
for box in config:
boxes.append(
Box(
screen=screen,
rows=box['rows'],
columns=box['columns'],
rows_offset=box['rows-offset'],
columns_offset=box['columns-offset'],
command=box['command'],
interval=box['interval'],
)
)
while True:
# Redraw the screen only when it changes.
if screen.is_wintouched():
screen.clear()
screen.refresh()
# Give every box an opportunity to redraw if it has changed.
[box.redraw_if_changed() for box in boxes]
# Wait before redrawing again.
curses.napms(1000)
curses.wrapper(main)
| Use napms method from curses rather than sleep method from time | Use napms method from curses rather than sleep method from time
| Python | mit | countermeasure/suave | ---
+++
@@ -2,7 +2,6 @@
import curses
import os
-import time
from box import Box
from utils import load_yaml
@@ -44,7 +43,7 @@
[box.redraw_if_changed() for box in boxes]
# Wait before redrawing again.
- time.sleep(1)
+ curses.napms(1000)
curses.wrapper(main) |
59627d96975b2735fabd0d44e34e018ca97dec2b | tweepy/error.py | tweepy/error.py | # Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
from __future__ import print_function
import six
class TweepError(Exception):
"""Tweepy exception"""
def __init__(self, reason, response=None, api_code=None):
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
super(TweepError, self).__init__(self, reason)
def __str__(self):
return self.reason
def is_rate_limit_error_message(message):
"""Check if the supplied error message belongs to a rate limit error."""
return isinstance(message, list) \
and len(message) > 0 \
and 'code' in message[0] \
and message[0]['code'] == 88
class RateLimitError(TweepError):
"""Exception for Tweepy hitting the rate limit."""
# RateLimitError has the exact same properties and inner workings
# as TweepError for backwards compatibility reasons.
pass
| # Tweepy
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
from __future__ import print_function
import six
class TweepError(Exception):
"""Tweepy exception"""
def __init__(self, reason, response=None, api_code=None):
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
super(TweepError, self).__init__(reason)
def __str__(self):
return self.reason
def is_rate_limit_error_message(message):
"""Check if the supplied error message belongs to a rate limit error."""
return isinstance(message, list) \
and len(message) > 0 \
and 'code' in message[0] \
and message[0]['code'] == 88
class RateLimitError(TweepError):
"""Exception for Tweepy hitting the rate limit."""
# RateLimitError has the exact same properties and inner workings
# as TweepError for backwards compatibility reasons.
pass
| Fix super usage in TweepError initialization | Fix super usage in TweepError initialization
| Python | mit | svven/tweepy,tweepy/tweepy | ---
+++
@@ -13,7 +13,7 @@
self.reason = six.text_type(reason)
self.response = response
self.api_code = api_code
- super(TweepError, self).__init__(self, reason)
+ super(TweepError, self).__init__(reason)
def __str__(self):
return self.reason |
be9ce58461e56873b0d8f60c85c0af96e48ce3fb | fabfile.py | fabfile.py | import logging
import yaml
from fabric.api import lcd, env, task
from fabric.contrib.project import rsync_project
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger()
try:
conf = yaml.load(open('deploy.yaml', 'rb').read())
except:
log.exception('error: unable to read deply.yaml config file:')
env.user = conf['user']
env.hosts = ['{}@{}:22'.format(env.user, host) for host in conf['hosts']]
def deploy_project(local_dir, remote_dir, exclusions=[]):
"""Deploy the entire project at local_dir to remote_dir, excluding the given paths."""
with lcd(local_dir):
rsync_project(remote_dir=remote_dir, local_dir='.', exclude=exclusions)
rsync_project(remote_dir=remote_dir, local_dir='resources', exclude=exclusions, delete=True)
@task
def deploy():
"""Deploys web and script to remote server."""
deploy_project('web', conf['web_remote_dir'],
['.git', 'fabfile.py', 'cache', 'config', 'template'])
deploy_project('script', conf['script_remote_dir'],
['.git', 'fabfile.py', 'cache', 'js', 'image'])
| import logging
import os
import yaml
from fabric.api import lcd, env, task, local
from fabric.contrib.project import rsync_project
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger()
repo_root = local('git rev-parse --show-toplevel', capture=True)
try:
conf = yaml.load(open(os.path.join(repo_root, 'deploy.yaml'), 'rb').read())
except:
log.exception('error: unable to read deply.yaml config file:')
env.user = conf['user']
env.hosts = ['{}@{}:22'.format(env.user, host) for host in conf['hosts']]
def deploy_project(local_dir, remote_dir, exclusions=[]):
"""Deploy the entire project at local_dir to remote_dir, excluding the given paths."""
with lcd(repo_root):
with lcd(local_dir):
rsync_project(remote_dir=remote_dir, local_dir='.', exclude=exclusions)
rsync_project(remote_dir=remote_dir, local_dir='resources', exclude=exclusions, delete=True)
@task
def deploy():
"""Deploys web and script to remote server."""
deploy_project('web', conf['web_remote_dir'],
['.git', 'fabfile.py', 'cache', 'config', 'template'])
deploy_project('script', conf['script_remote_dir'],
['.git', 'fabfile.py', 'cache', 'js', 'image'])
| Make deployment script work from anywhere. | Make deployment script work from anywhere.
| Python | mit | lexicalunit/pancake-master,lexicalunit/pancake-master,lexicalunit/pancake-master | ---
+++
@@ -1,16 +1,18 @@
import logging
+import os
import yaml
-from fabric.api import lcd, env, task
+from fabric.api import lcd, env, task, local
from fabric.contrib.project import rsync_project
logging.basicConfig(level=logging.DEBUG)
+
log = logging.getLogger()
-
+repo_root = local('git rev-parse --show-toplevel', capture=True)
try:
- conf = yaml.load(open('deploy.yaml', 'rb').read())
+ conf = yaml.load(open(os.path.join(repo_root, 'deploy.yaml'), 'rb').read())
except:
log.exception('error: unable to read deply.yaml config file:')
@@ -20,9 +22,10 @@
def deploy_project(local_dir, remote_dir, exclusions=[]):
"""Deploy the entire project at local_dir to remote_dir, excluding the given paths."""
- with lcd(local_dir):
- rsync_project(remote_dir=remote_dir, local_dir='.', exclude=exclusions)
- rsync_project(remote_dir=remote_dir, local_dir='resources', exclude=exclusions, delete=True)
+ with lcd(repo_root):
+ with lcd(local_dir):
+ rsync_project(remote_dir=remote_dir, local_dir='.', exclude=exclusions)
+ rsync_project(remote_dir=remote_dir, local_dir='resources', exclude=exclusions, delete=True)
@task |
d648598d669144d589ffbbb03bf56edad4050aff | connector/__manifest__.py | connector/__manifest__.py | # -*- coding: utf-8 -*-
# Copyright 2013-2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
{'name': 'Connector',
'version': '10.0.1.0.0',
'author': 'Camptocamp,Openerp Connector Core Editors,'
'Odoo Community Association (OCA)',
'website': 'http://odoo-connector.com',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': ['mail',
'queue_job',
],
'data': ['security/connector_security.xml',
'security/ir.model.access.csv',
'checkpoint/checkpoint_view.xml',
'connector_menu.xml',
'setting_view.xml',
'res_partner_view.xml',
],
'installable': True,
'application': True,
}
| # -*- coding: utf-8 -*-
# Copyright 2013-2017 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
{'name': 'Connector',
'version': '10.0.1.0.0',
'author': 'Camptocamp,Openerp Connector Core Editors,'
'Odoo Community Association (OCA)',
'website': 'http://odoo-connector.com',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': ['mail',
'queue_job',
],
'data': ['security/connector_security.xml',
'security/ir.model.access.csv',
'checkpoint/checkpoint_view.xml',
'connector_menu.xml',
'setting_view.xml',
'res_partner_view.xml',
],
'installable': True,
}
| Remove application flag, not an application | Remove application flag, not an application
| Python | agpl-3.0 | js-landoo/connector,js-landoo/connector | ---
+++
@@ -20,5 +20,4 @@
'res_partner_view.xml',
],
'installable': True,
- 'application': True,
} |
15cb279724a646368066591e81467e1b26d61938 | examples/charts/file/steps.py | examples/charts/file/steps.py | from bokeh.charts import Step, show, output_file
# build a dataset where multiple columns measure the same thing
data = dict(python=[2, 3, 7, 5, 26, 221, 44, 233, 254, 265, 266, 267, 120, 111],
pypy=[12, 33, 47, 15, 126, 121, 144, 233, 254, 225, 226, 267, 110, 130],
jython=[22, 43, 10, 25, 26, 101, 114, 203, 194, 215, 201, 227, 139, 160],
test=['foo', 'bar', 'foo', 'bar', 'foo', 'bar', 'foo', 'bar', 'foo', 'bar',
'foo', 'bar', 'foo', 'bar']
)
# create a line chart where each column of measures receives a unique color and dash style
line = Step(data, y=['python', 'pypy', 'jython'],
dash=['python', 'pypy', 'jython'],
color=['python', 'pypy', 'jython'],
title="Interpreter Sample Data", ylabel='Duration', legend=True)
output_file("steps.html")
show(line)
| """ This example uses the U.S. postage rate per ounce for stamps and
postcards.
Source: https://en.wikipedia.org/wiki/History_of_United_States_postage_rates
"""
from bokeh.charts import Step, show, output_file
# build a dataset where multiple columns measure the same thing
data = dict(stamp=[
.33, .33, .34, .37, .37, .37, .37, .39, .41, .42,
.44, .44, .44, .45, .46, .49, .49],
postcard=[
.20, .20, .21, .23, .23, .23, .23, .24, .26, .27,
.28, .28, .29, .32, .33, .34, .35],
)
# create a line chart where each column of measures receives a unique color and dash style
line = Step(data, y=['stamp', 'postcard'],
dash=['stamp', 'postcard'],
color=['stamp', 'postcard'],
title="U.S. Postage Rates (1999-2015)", ylabel='Rate per ounce', legend=True)
output_file("steps.html")
show(line)
| Change step example to plot US postage rates | Change step example to plot US postage rates
| Python | bsd-3-clause | ptitjano/bokeh,timsnyder/bokeh,draperjames/bokeh,percyfal/bokeh,justacec/bokeh,clairetang6/bokeh,philippjfr/bokeh,ericmjl/bokeh,rs2/bokeh,azjps/bokeh,DuCorey/bokeh,clairetang6/bokeh,draperjames/bokeh,clairetang6/bokeh,DuCorey/bokeh,aavanian/bokeh,KasperPRasmussen/bokeh,justacec/bokeh,bokeh/bokeh,aiguofer/bokeh,rs2/bokeh,aavanian/bokeh,stonebig/bokeh,schoolie/bokeh,msarahan/bokeh,msarahan/bokeh,jakirkham/bokeh,schoolie/bokeh,Karel-van-de-Plassche/bokeh,Karel-van-de-Plassche/bokeh,phobson/bokeh,stonebig/bokeh,azjps/bokeh,aavanian/bokeh,jakirkham/bokeh,ptitjano/bokeh,bokeh/bokeh,azjps/bokeh,ptitjano/bokeh,msarahan/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,phobson/bokeh,schoolie/bokeh,timsnyder/bokeh,azjps/bokeh,quasiben/bokeh,percyfal/bokeh,ericmjl/bokeh,jakirkham/bokeh,timsnyder/bokeh,phobson/bokeh,justacec/bokeh,mindriot101/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,ericmjl/bokeh,quasiben/bokeh,dennisobrien/bokeh,philippjfr/bokeh,clairetang6/bokeh,ptitjano/bokeh,ericmjl/bokeh,quasiben/bokeh,aiguofer/bokeh,timsnyder/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,dennisobrien/bokeh,phobson/bokeh,dennisobrien/bokeh,aavanian/bokeh,stonebig/bokeh,percyfal/bokeh,aiguofer/bokeh,aiguofer/bokeh,jakirkham/bokeh,rs2/bokeh,draperjames/bokeh,ericmjl/bokeh,bokeh/bokeh,dennisobrien/bokeh,schoolie/bokeh,bokeh/bokeh,KasperPRasmussen/bokeh,percyfal/bokeh,aavanian/bokeh,phobson/bokeh,justacec/bokeh,mindriot101/bokeh,DuCorey/bokeh,stonebig/bokeh,bokeh/bokeh,philippjfr/bokeh,philippjfr/bokeh,DuCorey/bokeh,rs2/bokeh,mindriot101/bokeh,timsnyder/bokeh,DuCorey/bokeh,dennisobrien/bokeh,ptitjano/bokeh,percyfal/bokeh,msarahan/bokeh,rs2/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,KasperPRasmussen/bokeh,mindriot101/bokeh,aiguofer/bokeh,philippjfr/bokeh | ---
+++
@@ -1,18 +1,25 @@
+""" This example uses the U.S. postage rate per ounce for stamps and
+postcards.
+
+Source: https://en.wikipedia.org/wiki/History_of_United_States_postage_rates
+"""
+
from bokeh.charts import Step, show, output_file
# build a dataset where multiple columns measure the same thing
-data = dict(python=[2, 3, 7, 5, 26, 221, 44, 233, 254, 265, 266, 267, 120, 111],
- pypy=[12, 33, 47, 15, 126, 121, 144, 233, 254, 225, 226, 267, 110, 130],
- jython=[22, 43, 10, 25, 26, 101, 114, 203, 194, 215, 201, 227, 139, 160],
- test=['foo', 'bar', 'foo', 'bar', 'foo', 'bar', 'foo', 'bar', 'foo', 'bar',
- 'foo', 'bar', 'foo', 'bar']
+data = dict(stamp=[
+ .33, .33, .34, .37, .37, .37, .37, .39, .41, .42,
+ .44, .44, .44, .45, .46, .49, .49],
+ postcard=[
+ .20, .20, .21, .23, .23, .23, .23, .24, .26, .27,
+ .28, .28, .29, .32, .33, .34, .35],
)
# create a line chart where each column of measures receives a unique color and dash style
-line = Step(data, y=['python', 'pypy', 'jython'],
- dash=['python', 'pypy', 'jython'],
- color=['python', 'pypy', 'jython'],
- title="Interpreter Sample Data", ylabel='Duration', legend=True)
+line = Step(data, y=['stamp', 'postcard'],
+ dash=['stamp', 'postcard'],
+ color=['stamp', 'postcard'],
+ title="U.S. Postage Rates (1999-2015)", ylabel='Rate per ounce', legend=True)
output_file("steps.html")
show(line) |
dd03a3d323594c7836525a1be733b689731d98c4 | core/settings/__init__.py | core/settings/__init__.py | """Settings package initialization."""
import dotenv
dotenv.load()
# Ensure development settings are not used in testing and production:
if dotenv.get('ENVIRONMENT') == 'HEROKU':
from .production import *
elif dotenv.get('ENVIRONMENT') == 'TRAVIS':
from .testing import *
else:
from .local import *
| """Settings package initialization."""
import dotenv
dotenv.load()
# Ensure development settings are not used in testing and production:
if dotenv.get('ENVIRONMENT') == 'PRODUCTION':
from .production import *
elif dotenv.get('ENVIRONMENT') == 'TRAVIS':
from .testing import *
else:
from .local import *
| Change ENVIRONMENT variable from HEROKU to PRODUCTION | Change ENVIRONMENT variable from HEROKU to PRODUCTION
| Python | mit | teamtaverna/core | ---
+++
@@ -4,7 +4,7 @@
dotenv.load()
# Ensure development settings are not used in testing and production:
-if dotenv.get('ENVIRONMENT') == 'HEROKU':
+if dotenv.get('ENVIRONMENT') == 'PRODUCTION':
from .production import *
elif dotenv.get('ENVIRONMENT') == 'TRAVIS':
from .testing import * |
a55f0fa9f80042c3fa673f263b259e70dd52f7d6 | streak-podium/read.py | streak-podium/read.py | import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
r = requests.get(url)
return r.text
| import requests
def input_file(filename):
"""
Read a file and return list of usernames.
Assumes one username per line and ignores blank lines.
"""
with open(filename, 'r') as f:
return list(line.strip() for line in f if line.strip())
def org_members(org_name):
"""
Query Github API and return list of members from a Github organization.
"""
# TODO: Return github org members, not a placeholder
return ['supermitch', 'Jollyra']
def svg_data(username):
"""
Returns the contribution streak SVG file contents from Github
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
try:
r = requests.get(url)
except requests.exceptions.ConnectionError:
logging.warn('Connection error trying to get url: [{}]'.format(url))
return None
return r.text
| Handle exception where connection fails | Handle exception where connection fails
| Python | mit | jollyra/hubot-streak-podium,jollyra/hubot-commit-streak,supermitch/streak-podium,jollyra/hubot-streak-podium,jollyra/hubot-commit-streak,supermitch/streak-podium | ---
+++
@@ -25,6 +25,11 @@
for a specific username.
"""
url = 'https://github.com/users/{}/contributions'.format(username)
- r = requests.get(url)
+ try:
+ r = requests.get(url)
+ except requests.exceptions.ConnectionError:
+ logging.warn('Connection error trying to get url: [{}]'.format(url))
+ return None
+
return r.text
|
764256427ea7c0dbf73accf63ed05e8372f58a75 | test/pyfrontend/util.py | test/pyfrontend/util.py | import contextlib
import tempfile
import shutil
@contextlib.contextmanager
def temporary_directory():
"""Simple context manager to make a temporary directory"""
tmpdir = tempfile.mkdtemp()
yield tmpdir
shutil.rmtree(tmpdir, ignore_errors=True)
| import contextlib
import tempfile
import shutil
import saliweb.test # for python 2.6 support
@contextlib.contextmanager
def temporary_directory():
"""Simple context manager to make a temporary directory"""
tmpdir = tempfile.mkdtemp()
yield tmpdir
shutil.rmtree(tmpdir, ignore_errors=True)
| Add unittest methods to Python 2.6 | Add unittest methods to Python 2.6
| Python | lgpl-2.1 | salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb | ---
+++
@@ -1,6 +1,7 @@
import contextlib
import tempfile
import shutil
+import saliweb.test # for python 2.6 support
@contextlib.contextmanager |
46b60c5886ede34db8998d7cfd5ae36f9211a0e8 | ovp_users/tests/test_views/__init__.py | ovp_users/tests/test_views/__init__.py | from ovp_users.tests.test_views.user import UserResourceViewSetTestCase
from ovp_users.tests.test_views.auth import JWTAuthTestCase
from ovp_users.tests.test_views.password_recovery import RecoveryTokenViewSetTestCase
from ovp_users.tests.test_views.password_recovery import RecoverPasswordViewSetTestCase
| from ovp_users.tests.test_views.user import UserResourceViewSetTestCase
from ovp_users.tests.test_views.auth import JWTAuthTestCase
from ovp_users.tests.test_views.profile import ProfileTestCase
from ovp_users.tests.test_views.password_recovery import RecoveryTokenViewSetTestCase
from ovp_users.tests.test_views.password_recovery import RecoverPasswordViewSetTestCase
| Add profile views tests to suite | Add profile views tests to suite
| Python | agpl-3.0 | OpenVolunteeringPlatform/django-ovp-users,OpenVolunteeringPlatform/django-ovp-users | ---
+++
@@ -1,4 +1,5 @@
from ovp_users.tests.test_views.user import UserResourceViewSetTestCase
from ovp_users.tests.test_views.auth import JWTAuthTestCase
+from ovp_users.tests.test_views.profile import ProfileTestCase
from ovp_users.tests.test_views.password_recovery import RecoveryTokenViewSetTestCase
from ovp_users.tests.test_views.password_recovery import RecoverPasswordViewSetTestCase |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.