commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
5f9a3c62c4117e0e674d33e675c3a54d800dacb6 | comics/accounts/models.py | comics/accounts/models.py | import uuid
from django.contrib.auth.models import User
from django.db import models
from django.dispatch import receiver
from comics.core.models import Comic
@receiver(models.signals.post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
def make_secret_key():
return uuid.uuid4().hex
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name='comics_profile')
secret_key = models.CharField(max_length=32, blank=False,
default=make_secret_key,
help_text='Secret key for feed and API access')
comics = models.ManyToManyField(Comic)
class Meta:
db_table = 'comics_user_profile'
def __unicode__(self):
return u'User profile for %s' % self.user
def generate_new_secret_key(self):
self.secret_key = make_secret_key()
| import uuid
from django.contrib.auth.models import User
from django.db import models
from django.dispatch import receiver
from comics.core.models import Comic
@receiver(models.signals.post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
def make_secret_key():
return uuid.uuid4().hex
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name='comics_profile')
secret_key = models.CharField(max_length=32, blank=False,
default=make_secret_key,
help_text='Secret key for feed and API access')
comics = models.ManyToManyField(Comic, through='Subscription')
class Meta:
db_table = 'comics_user_profile'
def __unicode__(self):
return u'User profile for %s' % self.user
def generate_new_secret_key(self):
self.secret_key = make_secret_key()
class Subscription(models.Model):
userprofile = models.ForeignKey(UserProfile)
comic = models.ForeignKey(Comic)
class Meta:
db_table = 'comics_user_profile_comics'
| Add a M2M table for the subscription relation between users and comics | Add a M2M table for the subscription relation between users and comics
| Python | agpl-3.0 | jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics | ---
+++
@@ -22,7 +22,7 @@
secret_key = models.CharField(max_length=32, blank=False,
default=make_secret_key,
help_text='Secret key for feed and API access')
- comics = models.ManyToManyField(Comic)
+ comics = models.ManyToManyField(Comic, through='Subscription')
class Meta:
db_table = 'comics_user_profile'
@@ -32,3 +32,11 @@
def generate_new_secret_key(self):
self.secret_key = make_secret_key()
+
+
+class Subscription(models.Model):
+ userprofile = models.ForeignKey(UserProfile)
+ comic = models.ForeignKey(Comic)
+
+ class Meta:
+ db_table = 'comics_user_profile_comics' |
ccdefc6584f64a832614be172ec384022805fa20 | hitchstory/arguments.py | hitchstory/arguments.py | from hitchstory import utils, exceptions
from ruamel.yaml.comments import CommentedMap, CommentedSeq
class Arguments(object):
"""A null-argument, single argument or group of arguments of a hitchstory step."""
def __init__(self, yaml_args):
"""Create arguments from dict (from yaml)."""
if yaml_args is None:
self.is_none = True
self.single_argument = False
elif type(yaml_args) is CommentedMap:
self.is_none = False
self.single_argument = False
self.kwargs = yaml_args
else:
self.is_none = False
self.single_argument = True
self.argument = yaml_args
def validate(self, validators):
if self.is_none:
return
elif self.single_argument:
return
else:
_kwargs = {}
for key, value in self.kwargs.items():
if key in validators.keys():
_kwargs[key] = validators[key](value)
else:
if type(value) in (CommentedMap, CommentedSeq):
raise exceptions.StepArgumentWithoutValidatorContainsComplexData
else:
_kwargs[key] = str(value)
self.kwargs = _kwargs
return
def pythonized_kwargs(self):
pythonized_dict = {}
for key, value in self.kwargs.items():
pythonized_dict[utils.to_underscore_style(key)] = value
return pythonized_dict
def to_dict(self):
if self.is_none:
return None
elif self.single_argument:
return self.argument
else:
return self.kwargs
| from hitchstory import utils, exceptions
from ruamel.yaml.comments import CommentedMap, CommentedSeq
class Arguments(object):
"""A null-argument, single argument or group of arguments of a hitchstory step."""
def __init__(self, yaml_args):
"""Create arguments from dict (from yaml)."""
if yaml_args is None:
self.is_none = True
self.single_argument = False
elif type(yaml_args) is CommentedMap:
self.is_none = False
self.single_argument = False
self.kwargs = yaml_args
else:
self.is_none = False
self.single_argument = True
self.argument = yaml_args
def validate(self, validators):
"""
Validate step using validators specified in decorators.
"""
if not self.is_none and not self.single_argument:
_kwargs = {}
for key, value in self.kwargs.items():
if key in validators.keys():
_kwargs[key] = validators[key](value)
else:
if type(value) in (CommentedMap, CommentedSeq):
raise exceptions.StepArgumentWithoutValidatorContainsComplexData
else:
_kwargs[key] = str(value)
self.kwargs = _kwargs
def pythonized_kwargs(self):
pythonized_dict = {}
for key, value in self.kwargs.items():
pythonized_dict[utils.to_underscore_style(key)] = value
return pythonized_dict
| REFACTOR : Removed unnecessary code. | REFACTOR : Removed unnecessary code.
| Python | agpl-3.0 | hitchtest/hitchstory | ---
+++
@@ -20,11 +20,10 @@
self.argument = yaml_args
def validate(self, validators):
- if self.is_none:
- return
- elif self.single_argument:
- return
- else:
+ """
+ Validate step using validators specified in decorators.
+ """
+ if not self.is_none and not self.single_argument:
_kwargs = {}
for key, value in self.kwargs.items():
if key in validators.keys():
@@ -35,18 +34,9 @@
else:
_kwargs[key] = str(value)
self.kwargs = _kwargs
- return
def pythonized_kwargs(self):
pythonized_dict = {}
for key, value in self.kwargs.items():
pythonized_dict[utils.to_underscore_style(key)] = value
return pythonized_dict
-
- def to_dict(self):
- if self.is_none:
- return None
- elif self.single_argument:
- return self.argument
- else:
- return self.kwargs |
0e5b0ccb7eb79fe68b8e40ad46d8e2e0efa01ba7 | test_queue.py | test_queue.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A series of pytest tests to test the quality
of our Queue class and its methods
"""
from __future__ import unicode_literals
import pytest
import queue
@pytest.fixture(scope="function")
def create_queue(request):
"""Create a queue with numbers 1 - 5"""
new_queue = queue.Queue()
for i in range(1, 6):
new_queue.enqueue(i)
return new_queue
def test_dequeue(create_queue):
first_queue = create_queue()
first_val = first_queue.dequeue()
assert first_val is 1
assert first_queue.size() is 4
second_val = first_queue.dequeue()
assert second_val is 2
assert first_queue.size() is 3
def test_enqueue(create_queue):
second_queue = create_queue()
second_queue.enqueue(6)
assert second_queue.size() is 6
foo = second_queue.dequeue()
assert foo is 1
assert second_queue.size() is 5
def test_empty(create_queue):
empty = queue.Queue()
assert empty.size() is 0
with pytest.raises("ValueError"):
empty.dequeue
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""A series of pytest tests to test the quality
of our Queue class and its methods
"""
from __future__ import unicode_literals
import pytest
import queue
@pytest.fixture(scope="function")
def create_queue(request):
"""Create a queue with numbers 1 - 5
"""
new_queue = queue.Queue()
for i in range(1, 6):
new_queue.enqueue(i)
return new_queue
def test_dequeue(create_queue):
"""Test that the queue shrinks and returns first in
"""
first_queue = create_queue
first_val = first_queue.dequeue()
assert first_val is 1
assert first_queue.size() is 4
second_val = first_queue.dequeue()
assert second_val is 2
assert first_queue.size() is 3
def test_enqueue(create_queue):
"""Test that the queue grows and returns first in
"""
second_queue = create_queue
second_queue.enqueue(6)
assert second_queue.size() is 6
foo = second_queue.dequeue()
assert foo is 1
assert second_queue.size() is 5
def test_empty(create_queue):
"""Test that empty queue size method returns 0 and dequeue raises IndexError
"""
empty = queue.Queue()
assert empty.size() is 0
with pytest.raises(IndexError):
empty.dequeue()
| Fix errors in test file | Fix errors in test file
Fix errors and typos in 'test_queue.py'
| Python | mit | jesseklein406/data-structures | ---
+++
@@ -11,7 +11,8 @@
@pytest.fixture(scope="function")
def create_queue(request):
- """Create a queue with numbers 1 - 5"""
+ """Create a queue with numbers 1 - 5
+ """
new_queue = queue.Queue()
for i in range(1, 6):
new_queue.enqueue(i)
@@ -19,7 +20,9 @@
def test_dequeue(create_queue):
- first_queue = create_queue()
+ """Test that the queue shrinks and returns first in
+ """
+ first_queue = create_queue
first_val = first_queue.dequeue()
assert first_val is 1
assert first_queue.size() is 4
@@ -29,7 +32,9 @@
def test_enqueue(create_queue):
- second_queue = create_queue()
+ """Test that the queue grows and returns first in
+ """
+ second_queue = create_queue
second_queue.enqueue(6)
assert second_queue.size() is 6
foo = second_queue.dequeue()
@@ -38,7 +43,9 @@
def test_empty(create_queue):
+ """Test that empty queue size method returns 0 and dequeue raises IndexError
+ """
empty = queue.Queue()
assert empty.size() is 0
- with pytest.raises("ValueError"):
- empty.dequeue
+ with pytest.raises(IndexError):
+ empty.dequeue() |
26de6c5decac3345dee470a0968926a65d3497b9 | test_stack.py | test_stack.py | import pytest
from stack import Element
from stack import Stack
def test_element_init():
n = Element()
assert n.val is None
assert n.next is None
n = Element(3)
assert n.val == 3
assert n.next is None
def test_stack_init():
l = Stack()
assert l.top is None
def test_stack_push():
l = Stack()
e = Element(10)
l.push(e)
assert l.top == e
assert l.top.previous is None
f = Element("String")
l.push(f)
assert l.top == f
assert l.top.previous == e
assert l.top.previous.previous is None
# def test_stack_pop():
# l = stack()
# a = l.pop()
# assert a is None
| import pytest
from stack import Element
from stack import Stack
def test_element_init():
n = Element()
assert n.val is None
assert n.previous is None
m = Element(3)
assert m.val == 3
assert m.previous is None
def test_stack_init():
l = Stack()
assert l.top is None
def test_stack_push():
l = Stack()
e = Element(10)
l.push(e)
assert l.top == e
assert l.top.previous is None
f = Element("String")
l.push(f)
assert l.top == f
assert l.top.previous == e
assert l.top.previous.previous is None
def test_stack_pop():
l = Stack()
e = Element(10)
l.push(e)
f = Element("String")
l.push(f)
assert l.pop() == f.val
assert l.top == e
assert l.top.previous is None
assert l.pop() == e.val
assert l.top is None
with pytest.raises(ValueError):
l.pop()
| Add test for pop and adjust element init test | Add test for pop and adjust element init test
| Python | mit | constanthatz/data-structures | ---
+++
@@ -6,10 +6,10 @@
def test_element_init():
n = Element()
assert n.val is None
- assert n.next is None
- n = Element(3)
- assert n.val == 3
- assert n.next is None
+ assert n.previous is None
+ m = Element(3)
+ assert m.val == 3
+ assert m.previous is None
def test_stack_init():
@@ -30,7 +30,16 @@
assert l.top.previous.previous is None
-# def test_stack_pop():
-# l = stack()
-# a = l.pop()
-# assert a is None
+def test_stack_pop():
+ l = Stack()
+ e = Element(10)
+ l.push(e)
+ f = Element("String")
+ l.push(f)
+ assert l.pop() == f.val
+ assert l.top == e
+ assert l.top.previous is None
+ assert l.pop() == e.val
+ assert l.top is None
+ with pytest.raises(ValueError):
+ l.pop() |
151c97a3a5cd0f9103c891ee9c60f3fe52fc3d12 | test_suite.py | test_suite.py | import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
management.call_command('test', 'resources', 'forms', 'tokens')
| import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = sys.argv[1:]
if not apps:
apps = [
'resources',
'forms',
'tokens',
]
management.call_command('test', *apps)
| Allow apps to be specified from the command line | Allow apps to be specified from the command line
| Python | bsd-2-clause | chop-dbhi/serrano,chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night | ---
+++
@@ -1,6 +1,17 @@
import os
+import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
-management.call_command('test', 'resources', 'forms', 'tokens')
+
+apps = sys.argv[1:]
+
+if not apps:
+ apps = [
+ 'resources',
+ 'forms',
+ 'tokens',
+ ]
+
+management.call_command('test', *apps) |
9e365b0738a6fcd5f0f67375288cf8bea771c6eb | freight/notifiers/base.py | freight/notifiers/base.py | from __future__ import absolute_import
__all__ = ['Notifier', 'NotifierEvent']
class NotifierEvent(object):
TASK_STARTED = 0
TASK_FINISHED = 1
TASK_QUEUED = 2
class Notifier(object):
DEFAULT_EVENTS = [NotifierEvent.TASK_STARTED, NotifierEvent.TASK_FINISHED]
def get_default_options(self):
return {
# TODO(dcramer): we want to support events, but we need validators
# before that can happen to avoid magical constants
# 'events': {},
}
def get_options(self):
return {}
def send(self, task, config, event):
raise NotImplementedError
def should_send(self, task, config, event):
return event in config.get('events', self.DEFAULT_EVENTS)
| from __future__ import absolute_import
__all__ = ['Notifier', 'NotifierEvent']
class NotifierEvent(object):
TASK_STARTED = 0
TASK_FINISHED = 1
TASK_QUEUED = 2
class Notifier(object):
DEFAULT_EVENTS = [
NotifierEvent.TASK_QUEUED,
NotifierEvent.TASK_STARTED,
NotifierEvent.TASK_FINISHED,
]
def get_default_options(self):
return {
# TODO(dcramer): we want to support events, but we need validators
# before that can happen to avoid magical constants
# 'events': {},
}
def get_options(self):
return {}
def send(self, task, config, event):
raise NotImplementedError
def should_send(self, task, config, event):
return event in config.get('events', self.DEFAULT_EVENTS)
| Add TASK_QUEUED to default notifier events | Add TASK_QUEUED to default notifier events
| Python | apache-2.0 | rshk/freight,getsentry/freight,rshk/freight,klynton/freight,getsentry/freight,getsentry/freight,klynton/freight,getsentry/freight,rshk/freight,getsentry/freight,klynton/freight,klynton/freight,rshk/freight | ---
+++
@@ -10,7 +10,11 @@
class Notifier(object):
- DEFAULT_EVENTS = [NotifierEvent.TASK_STARTED, NotifierEvent.TASK_FINISHED]
+ DEFAULT_EVENTS = [
+ NotifierEvent.TASK_QUEUED,
+ NotifierEvent.TASK_STARTED,
+ NotifierEvent.TASK_FINISHED,
+ ]
def get_default_options(self):
return { |
1243d484009e621338a5fcd609d62bedd9796f05 | tests/base.py | tests/base.py | import unittest
from app import create_app, db
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = {
"username": "brian",
"password": "password"
}
with self.app.app_context():
db.create_all()
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
| import unittest
import json
from app import create_app, db
from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
self.user = json.dumps({
"username": "brian",
"password": "password"
})
with self.app.app_context():
db.create_all()
def set_headers(self):
""" Set headers for Authorization and Content Type. """
self.client.post("/auth/register",
data=self.user,
content_type='application/json')
response = self.client.post( "/auth/login",
data=self.user,
content_type='application/json')
payload = json.loads(response.data.decode())
# get the token from the reponse body
self.token = payload['token']
return dict({
'Authorization': self.token,
'Content-Type': 'application/json',
})
def tearDown(self):
with self.app.app_context():
db.session.remove()
db.drop_all()
| Add authorization and content-type headers to request for tests | [CHORE] Add authorization and content-type headers to request for tests
| Python | mit | brayoh/bucket-list-api | ---
+++
@@ -1,18 +1,40 @@
import unittest
+import json
from app import create_app, db
+from app.models import User
class Base(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.client = self.app.test_client()
- self.user = {
+ self.user = json.dumps({
"username": "brian",
"password": "password"
- }
+ })
with self.app.app_context():
db.create_all()
+
+ def set_headers(self):
+ """ Set headers for Authorization and Content Type. """
+ self.client.post("/auth/register",
+ data=self.user,
+ content_type='application/json')
+
+ response = self.client.post( "/auth/login",
+ data=self.user,
+ content_type='application/json')
+
+ payload = json.loads(response.data.decode())
+
+ # get the token from the reponse body
+ self.token = payload['token']
+
+ return dict({
+ 'Authorization': self.token,
+ 'Content-Type': 'application/json',
+ })
def tearDown(self):
with self.app.app_context(): |
6b7e220cdaa403354104aa0fbeabdce8ce37ff13 | indra/tests/test_tas.py | indra/tests/test_tas.py | from indra.sources.tas import process_from_web
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 51722, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
| from indra.sources.tas import process_from_web
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
| Update expected number of tas statements in test | Update expected number of tas statements in test
| Python | bsd-2-clause | sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/belpy,johnbachman/belpy,bgyori/indra,bgyori/indra,johnbachman/belpy,bgyori/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra | ---
+++
@@ -7,6 +7,6 @@
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
- assert num_stmts == 51722, num_stmts
+ assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence." |
863828c37eca9046a7dd169114e2a6c3e02e28aa | proxy-firewall.py | proxy-firewall.py | #!/usr/bin/python
"""
Set the firewall to allow access to configured HTTP(S) proxies.
This is only necessary until rBuilder handles the EC2 image posting
and registration process.
"""
import os, sys, urllib, urlparse
from conary.conarycfg import ConaryConfiguration
def main(args):
cfg = ConaryConfiguration(False)
cfg.read('/etc/conaryrc', exception=False)
for schema, uri in cfg.proxy.items():
hostpart = urlparse.urlsplit(uri)[1]
host, port = urllib.splitport(hostpart)
if not port:
if schema == 'https':
port = '443'
else:
port = '80'
os.system('/sbin/iptables -A FORWARD-SLAVE -m state --state NEW '
'-m tcp -p tcp --dport %s -d %s -j ACCEPT 2>/dev/null'
% (port, host))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| #!/usr/bin/python
"""
Set the firewall to allow access to configured HTTP(S) proxies.
This is only necessary until rBuilder handles the EC2 image posting
and registration process.
"""
import os, sys, urllib, urlparse
from conary.conarycfg import ConaryConfiguration
def main(args):
cfg = ConaryConfiguration(False)
cfg.read('/etc/conaryrc', exception=False)
import epdb;epdb.st()
for schema, uri in cfg.proxy.items():
userhostport = urlparse.urlsplit(uri)[1]
hostport = urllib.splituser(userhostport)[1]
host, port = urllib.splitport(hostport)
if not port:
if schema == 'https':
port = '443'
else:
port = '80'
os.system('/sbin/iptables -A FORWARD-SLAVE -m state --state NEW '
'-m tcp -p tcp --dport %s -d %s -j ACCEPT 2>/dev/null'
% (port, host))
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| Fix proxy URIs with credentials in them | Fix proxy URIs with credentials in them
| Python | apache-2.0 | sassoftware/jobmaster,sassoftware/jobmaster,sassoftware/jobmaster | ---
+++
@@ -12,9 +12,11 @@
def main(args):
cfg = ConaryConfiguration(False)
cfg.read('/etc/conaryrc', exception=False)
+ import epdb;epdb.st()
for schema, uri in cfg.proxy.items():
- hostpart = urlparse.urlsplit(uri)[1]
- host, port = urllib.splitport(hostpart)
+ userhostport = urlparse.urlsplit(uri)[1]
+ hostport = urllib.splituser(userhostport)[1]
+ host, port = urllib.splitport(hostport)
if not port:
if schema == 'https':
port = '443' |
607728b17c0a79725d997b458a53d1b3d1394a59 | pymue/__init__.py | pymue/__init__.py | from _pymue import peng, Team, DistanceMatrix, SeenTable, GuestTupleGenerator
| from _pymue import peng, Team, DistanceMatrix, SeenTable, GuestTupleGenerator, GuestPair
def pair_pprint(pair):
return "(%s, %s)" % (pair.first, pair.second)
GuestPair.__repr__ = pair_pprint
| Add string representation to GuestPair | Add string representation to GuestPair
Signed-off-by: Jan Losinski <577c4104c61edf9f052c616c0c23e67bef4a9955@wh2.tu-dresden.de>
| Python | bsd-3-clause | janLo/meet-and-eat-distribution-tool,janLo/meet-and-eat-distribution-tool,eXma/meet-and-eat-distribution-tool,eXma/meet-and-eat-distribution-tool,janLo/meet-and-eat-distribution-tool,eXma/meet-and-eat-distribution-tool | ---
+++
@@ -1 +1,6 @@
-from _pymue import peng, Team, DistanceMatrix, SeenTable, GuestTupleGenerator
+from _pymue import peng, Team, DistanceMatrix, SeenTable, GuestTupleGenerator, GuestPair
+
+def pair_pprint(pair):
+ return "(%s, %s)" % (pair.first, pair.second)
+
+GuestPair.__repr__ = pair_pprint |
641e31fcb05016e5fb27e8f115a763b72c7638f3 | python/tag_img.py | python/tag_img.py | import requests
import json
# Tag an image based on detected visual content which mean running a CNN on top of it.
# https://pixlab.io/#/cmd?id=tagimg for more info.
# Target Image: Change to any link or switch to POST if you want to upload your image directly, refer to the sample set for more info.
img = 'https://s-media-cache-ak0.pinimg.com/originals/35/d0/f6/35d0f6ee0e40306c41cfd714c625f78e.jpg'
# Your PixLab key
key = 'My_PixLab_Key'
# Censure an image based on its NSFW score
req = requests.get('https://api.pixlab.io/tagimg',params={'img':img,'key':key})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
total = len(reply['tags']) # Total tags
print ("Total tags: "+str(total))
for tag in reply['tags']:
print("Tag: "+tag['name']+" - Confidence: "+str(tag['confidence']))
| import requests
import json
# Tag an image based on detected visual content which mean running a CNN on top of it.
# https://pixlab.io/#/cmd?id=tagimg for more info.
# Target Image: Change to any link or switch to POST if you want to upload your image directly, refer to the sample set for more info.
img = 'https://s-media-cache-ak0.pinimg.com/originals/35/d0/f6/35d0f6ee0e40306c41cfd714c625f78e.jpg'
# Your PixLab key
key = 'My_PixLab_Key'
req = requests.get('https://api.pixlab.io/tagimg',params={'img':img,'key':key})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
total = len(reply['tags']) # Total tags
print ("Total tags: "+str(total))
for tag in reply['tags']:
print("Tag: "+tag['name']+" - Confidence: "+str(tag['confidence']))
| Tag an image based on detected visual content | Tag an image based on detected visual content | Python | bsd-2-clause | symisc/pixlab,symisc/pixlab,symisc/pixlab | ---
+++
@@ -9,7 +9,6 @@
# Your PixLab key
key = 'My_PixLab_Key'
-# Censure an image based on its NSFW score
req = requests.get('https://api.pixlab.io/tagimg',params={'img':img,'key':key})
reply = req.json()
if reply['status'] != 200: |
bf6f77d90c3749983eb0b5358fb2f9fedb7d53da | app/main.py | app/main.py | import spark
import motion
from bot import process_command
from config import config
from flask import Flask
from flask import request
from flask import jsonify
from threading import Thread
import time
import sys
app = Flask(__name__)
def on_motion_detected():
print("motion detected!")
def run_motion_detection():
print("hello")
motion.detector_on(on_motion_detected)
def run_flask_server():
app.run(host='0.0.0.0', port=8181)
@app.route("/", methods=["post"])
def index():
# Parse request
webhook_req = request.get_json()
message = spark.get_message(message_id=webhook_req['data']['id'], bearer=config["bearer"])
if message["personEmail"] != config["bot_email"]:
res = process_command(message["command"])
if res["response_required"]:
spark.send_message(message["roomId"], res["data"], config["bearer"])
return jsonify({})
if __name__ == "__main__":
motion_thread = Thread(target = run_motion_detection)
flask_thread = Thread(target = run_flask_server)
motion_thread.daemon = True
flask_thread.daemon = True
motion_thread.start()
flask_thread.start()
| import spark
import motion
from bot import process_command
from config import config
from flask import Flask
from flask import request
from flask import jsonify
from threading import Thread
import time
import sys
app = Flask(__name__)
def on_motion_detected():
print("motion detected!")
def run_motion_detection():
print("hello")
motion.detector_on(on_motion_detected)
def run_flask_server():
app.run(host='0.0.0.0', port=8181)
@app.route("/", methods=["post"])
def index():
# Parse request
webhook_req = request.get_json()
message = spark.get_message(message_id=webhook_req['data']['id'], bearer=config["bearer"])
if message["personEmail"] != config["bot_email"]:
res = process_command(message["command"])
if res["response_required"]:
spark.send_message(message["roomId"], res["data"], config["bearer"])
return jsonify({})
if __name__ == "__main__":
motion_thread = Thread(target = run_motion_detection)
motion_thread.daemon = True
motion_thread.start()
app.run(host='0.0.0.0', port=8080)
| Use Python 3.6 compatible API for threading | Use Python 3.6 compatible API for threading
| Python | mit | alwye/spark-pi,alwye/spark-pi | ---
+++
@@ -40,8 +40,6 @@
if __name__ == "__main__":
motion_thread = Thread(target = run_motion_detection)
- flask_thread = Thread(target = run_flask_server)
motion_thread.daemon = True
- flask_thread.daemon = True
motion_thread.start()
- flask_thread.start()
+ app.run(host='0.0.0.0', port=8080) |
9eff339cba38a4a7f2a57a123cdc67f8cc23619f | in-class-code/2017-03-06-simulatingKinematics.py | in-class-code/2017-03-06-simulatingKinematics.py | ### Import our stuff
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
### Set up initial values
position = 555 # feet
velocity = 0 # feet/second
acceleration = -32.17 # feet / second^2
time_steps = np.linspace(0, 5, 501) # creates two entries at time zero
time_step_size = time_steps[1] - time_steps[0]
### Create a way to collect/record data
initial_data = {
'position': [position, 12],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [0]
}
motion_data = pd.DataFrame(initial_data)
### Evolve the simulation forward using our update rules
for time_step in time_steps:
velocity = velocity + (acceleration * time_step_size)
position = position + (velocity * time_step_size)
updated_data = pd.DataFrame({
'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [time_step]
})
motion_data = motion_data.append(updated_data)
motion_data.plot.line(
x = 'time',
y = 'position'
)
motion_data
| ### Import our stuff
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
### Set up initial values
position = 555 # feet
velocity = 0 # feet/second
acceleration = -32.17 # feet / second^2
time_steps = np.linspace(0, 5, 501) # creates two entries at time zero
time_step_size = time_steps[1] - time_steps[0]
### Create a way to collect/record data
initial_data = {
'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [0]
}
motion_data = pd.DataFrame(initial_data)
### Evolve the simulation forward using our update rules
for time_step in time_steps:
velocity = velocity + (acceleration * time_step_size)
position = position + (velocity * time_step_size)
updated_data = pd.DataFrame({
'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [time_step]
})
motion_data = motion_data.append(updated_data)
motion_data.plot.line(
x = 'time',
y = 'position'
)
motion_data
| Remove dataframe error we were testing | Remove dataframe error we were testing
| Python | agpl-3.0 | ComputationalModeling/spring-2017-danielak,ComputationalModeling/spring-2017-danielak,ComputationalModeling/spring-2017-danielak,ComputationalModeling/spring-2017-danielak | ---
+++
@@ -12,7 +12,7 @@
### Create a way to collect/record data
initial_data = {
- 'position': [position, 12],
+ 'position': [position],
'velocity': [velocity],
'acceleration': [acceleration],
'time': [0] |
dc65920f52ca584608633cc511590b41b590f79e | billjobs/permissions.py | billjobs/permissions.py | from rest_framework import permissions
class CustomUserAPIPermission(permissions.BasePermission):
"""
Set custom permission for UserAPI
* GET : only accessible by admin
* POST : is public, everyone can create a user
"""
def has_permission(self, request, view):
"""
Define permission based on request method
"""
if request.method == 'GET':
# admin only
return request.user and request.user.is_staff
elif request.method == 'POST':
# is public
return True
# all other methods are accepted to allow 405 response
return True
| from rest_framework import permissions
from rest_framework.compat import is_authenticated
class CustomUserAPIPermission(permissions.BasePermission):
"""
Set custom permission for UserAPI
* GET : only accessible by admin
* POST : is public, everyone can create a user
"""
def has_permission(self, request, view):
"""
Define permission based on request method
"""
if request.method == 'GET':
# admin only
return request.user and request.user.is_staff
elif request.method == 'POST':
# is public
return True
# all other methods are accepted to allow 405 response
return True
class CustomUserDetailAPIPermission(permissions.BasePermission):
"""
Set custom permission for user detail API
* GET, PUT, DELETE :
* admin can access all users instance
* current user only his instance
* public is forbidden
"""
def has_permission(self, request, view):
"""
Give permission for admin or user to access API
"""
return (
request.user and
request.user.is_staff or
is_authenticated(request.user)
)
def has_object_permission(self, request, view, obj):
"""
Compare User instance in request is equal to User instance in obj
"""
return request.user.is_staff or obj == request.user
| Create permission for admin and user can access GET, PUT, DELETE method, user can access is instance only | Create permission for admin and user can access GET, PUT, DELETE method, user can access is instance only
| Python | mit | ioO/billjobs | ---
+++
@@ -1,4 +1,5 @@
from rest_framework import permissions
+from rest_framework.compat import is_authenticated
class CustomUserAPIPermission(permissions.BasePermission):
"""
@@ -21,3 +22,28 @@
return True
# all other methods are accepted to allow 405 response
return True
+
+class CustomUserDetailAPIPermission(permissions.BasePermission):
+ """
+ Set custom permission for user detail API
+
+ * GET, PUT, DELETE :
+ * admin can access all users instance
+ * current user only his instance
+ * public is forbidden
+ """
+ def has_permission(self, request, view):
+ """
+ Give permission for admin or user to access API
+ """
+ return (
+ request.user and
+ request.user.is_staff or
+ is_authenticated(request.user)
+ )
+
+ def has_object_permission(self, request, view, obj):
+ """
+ Compare User instance in request is equal to User instance in obj
+ """
+ return request.user.is_staff or obj == request.user |
96ddbd97d1aaf7a373adb04942e2d3d17931a285 | pytips/app.py | pytips/app.py | #! /usr/bin/env python
"""The main application logic for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import json
import random
from flask import Flask
import requests
app = Flask(__name__)
@app.route('/')
def index():
my_params = {
"q": "#python+tip",
"window": "a",
"type": "tweet"
}
r = requests.get('http://otter.topsy.com/search.json', params=my_params)
response = json.loads(r.content)['response']
# TODO Go beyond the first page of results.
return random.choice(response['list'])[u'title']
if __name__ == '__main__':
app.run(debug=True)
| #! /usr/bin/env python
"""The main application logic for PyTips."""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import json
import random
from flask import Flask
import requests
app = Flask(__name__)
QUERY = "#python+tip"
PER_PAGE = 100
SEARCH_COUNT_URL = 'http://otter.topsy.com/searchcount.json'
SEARCH_URL = 'http://otter.topsy.com/search.json'
def _get_page_and_index_for_random_tip():
initial_count_params = {
"q": QUERY
}
r = requests.get(SEARCH_COUNT_URL, params=initial_count_params)
response = json.loads(r.content)['response']
total_result_count = response['a']
random_index = random.randint(0, total_result_count - 1)
# Relies on int division truncating; this might be a Python 3 problem.
page_for_random_index = random_index / PER_PAGE
offset = PER_PAGE * page_for_random_index
index_on_page = random_index % PER_PAGE
return offset, index_on_page
@app.route('/')
def index():
offset, index = _get_page_and_index_for_random_tip()
search_params = {
"q": QUERY,
"window": "a",
"offset": offset,
"perpage": PER_PAGE,
}
r = requests.get(SEARCH_URL, params=search_params)
response = json.loads(r.content)['response']
return response['list'][index]['title']
if __name__ == '__main__':
app.run(debug=True)
| Choose tip from *all* results. | Choose tip from *all* results.
Before, we were just choosing from the first page of results. Now, we perform
a query to get the full number of results, calculate the page number and
position on the page of a random result, and then pull that result for the
tip.
| Python | isc | gthank/pytips,gthank/pytips,gthank/pytips,gthank/pytips | ---
+++
@@ -12,19 +12,40 @@
app = Flask(__name__)
+QUERY = "#python+tip"
+PER_PAGE = 100
+SEARCH_COUNT_URL = 'http://otter.topsy.com/searchcount.json'
+SEARCH_URL = 'http://otter.topsy.com/search.json'
+
+
+def _get_page_and_index_for_random_tip():
+ initial_count_params = {
+ "q": QUERY
+ }
+ r = requests.get(SEARCH_COUNT_URL, params=initial_count_params)
+ response = json.loads(r.content)['response']
+ total_result_count = response['a']
+ random_index = random.randint(0, total_result_count - 1)
+ # Relies on int division truncating; this might be a Python 3 problem.
+ page_for_random_index = random_index / PER_PAGE
+ offset = PER_PAGE * page_for_random_index
+ index_on_page = random_index % PER_PAGE
+ return offset, index_on_page
@app.route('/')
def index():
- my_params = {
- "q": "#python+tip",
+ offset, index = _get_page_and_index_for_random_tip()
+ search_params = {
+ "q": QUERY,
"window": "a",
- "type": "tweet"
+ "offset": offset,
+ "perpage": PER_PAGE,
}
- r = requests.get('http://otter.topsy.com/search.json', params=my_params)
+ r = requests.get(SEARCH_URL, params=search_params)
response = json.loads(r.content)['response']
- # TODO Go beyond the first page of results.
- return random.choice(response['list'])[u'title']
+ return response['list'][index]['title']
+
if __name__ == '__main__':
app.run(debug=True) |
96e60f1b56f37d1b953d63bf948cde33d1e04e65 | halaqat/settings/shaha.py | halaqat/settings/shaha.py | from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
# STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
| from .base_settings import *
import dj_database_url
import os
ALLOWED_HOSTS = ['shaha-halaqat.herokuapp.com', '0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
# STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
| Add heroku app url to ALLOWED_HOSTS | Add heroku app url to ALLOWED_HOSTS
| Python | mit | EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat | ---
+++
@@ -2,7 +2,7 @@
import dj_database_url
import os
-ALLOWED_HOSTS = ['0.0.0.0']
+ALLOWED_HOSTS = ['shaha-halaqat.herokuapp.com', '0.0.0.0']
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env) |
054503e406146eeff5f8d5437eb7db581eaeb0f2 | oscar_adyen/__init__.py | oscar_adyen/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url
from django_adyen import urlpatterns
from . import views
urlpatterns = [
url(r'^payment-done/$', views.PaymentResultView.as_view(),
name='payment-result'),
url(r'^notify/$', views.NotificationView.as_view(),
name='payment-notification')
] + urlpatterns
urls = urlpatterns, 'oscar-adyen', 'oscar-adyen'
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url
from django_adyen import urlpatterns
urlpatterns = [
url(r'^payment-done/$', 'oscar_adyen.views.payment_result',
name='payment-result'),
url(r'^notify/$', 'oscar_adyen.views.notification',
name='payment-notification')
] + urlpatterns
urls = urlpatterns, 'oscar-adyen', 'oscar-adyen'
| Allow importing oscar_adyen.mixins without importing oscar_adyen.views | Allow importing oscar_adyen.mixins without importing oscar_adyen.views
| Python | mit | machtfit/adyen | ---
+++
@@ -6,12 +6,10 @@
from django_adyen import urlpatterns
-from . import views
-
urlpatterns = [
- url(r'^payment-done/$', views.PaymentResultView.as_view(),
+ url(r'^payment-done/$', 'oscar_adyen.views.payment_result',
name='payment-result'),
- url(r'^notify/$', views.NotificationView.as_view(),
+ url(r'^notify/$', 'oscar_adyen.views.notification',
name='payment-notification')
] + urlpatterns
|
3f92ca011d08d69c5443994c79958a5d1f3b7415 | werobot/session/saekvstorage.py | werobot/session/saekvstorage.py | # -*- coding: utf-8 -*-
from . import SessionStorage
class SaeKVDBStorage(SessionStorage):
"""
SaeKVDBStorage 使用SAE 的 KVDB 来保存你的session ::
import werobot
from werobot.session.saekvstorage import SaeKVDBStorage
session_storage = SaeKVDBStorage()
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
需要先在后台开启 KVDB 支持
"""
def __init__(self, prefix='WeRoBotSession_'):
try:
import sae.kvdb
except ImportError:
raise RuntimeError("SaeKVDBStorage requires SAE environment")
self.kv = sae.kvdb.KVClient()
self.prefix = prefix
def key_name(self, s):
return '{prefix}{s}'.format(prefix=self.prefix, s=s)
def get(self, id):
return self.kv.get(self.key_name(id)) or {}
def set(self, id, value):
return self.kv.set(self.key_name(id), value)
def delete(self, id):
return self.kv.delete(self.key_name(id))
| # -*- coding: utf-8 -*-
from . import SessionStorage
class SaeKVDBStorage(SessionStorage):
"""
SaeKVDBStorage 使用SAE 的 KVDB 来保存你的session ::
import werobot
from werobot.session.saekvstorage import SaeKVDBStorage
session_storage = SaeKVDBStorage()
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
需要先在后台开启 KVDB 支持
:param prefix: KVDB 中 Session 数据 key 的 prefix 。默认为 ``ws_``
"""
def __init__(self, prefix='ws_'):
try:
import sae.kvdb
except ImportError:
raise RuntimeError("SaeKVDBStorage requires SAE environment")
self.kv = sae.kvdb.KVClient()
self.prefix = prefix
def key_name(self, s):
return '{prefix}{s}'.format(prefix=self.prefix, s=s)
def get(self, id):
return self.kv.get(self.key_name(id)) or {}
def set(self, id, value):
return self.kv.set(self.key_name(id), value)
def delete(self, id):
return self.kv.delete(self.key_name(id))
| Add param doc of SAEKVStorage | Add param doc of SAEKVStorage
| Python | mit | whtsky/WeRoBot,FlyRabbit/WeRoBot,whtsky/WeRoBot,adrianzhang/WeRoBot,notwin/WeRoBot,weberwang/WeRoBot,Infixz/WeRoBot,weberwang/WeRoBot,chenjiancan/WeRoBot,Zeacone/WeRoBot,tdautc19841202/WeRoBot,one-leaf/WeRoBot,Zhenghaotao/WeRoBot,kmalloc/WeRoBot,FlyRabbit/WeRoBot,whtsky/WeRoBot,adam139/WeRobot,adam139/WeRobot | ---
+++
@@ -16,9 +16,9 @@
需要先在后台开启 KVDB 支持
-
+ :param prefix: KVDB 中 Session 数据 key 的 prefix 。默认为 ``ws_``
"""
- def __init__(self, prefix='WeRoBotSession_'):
+ def __init__(self, prefix='ws_'):
try:
import sae.kvdb
except ImportError: |
cee2f2132cb54d5089f44bb48c9a19bd538dc72a | src/commoner_i/urls.py | src/commoner_i/urls.py | from django.conf.urls.defaults import patterns, include, handler500, url
from django.conf import settings
from django.contrib import admin
handler500 # Pyflakes
urlpatterns = patterns(
'',
# Profile view
url(r'^p/(?P<username>\w+)/$', 'commoner_i.views.badge',
name='profile_badge'),
)
| from django.conf.urls.defaults import patterns, include, handler500, handler404, url
from django.conf import settings
from django.contrib import admin
urlpatterns = patterns(
'',
# Profile view
url(r'^p/(?P<username>\w+)/$', 'commoner_i.views.badge',
name='profile_badge'),
)
| Use the default 404/500 handlers for i.cc.net. | Use the default 404/500 handlers for i.cc.net.
| Python | agpl-3.0 | cc-archive/commoner,cc-archive/commoner | ---
+++
@@ -1,9 +1,7 @@
-from django.conf.urls.defaults import patterns, include, handler500, url
+from django.conf.urls.defaults import patterns, include, handler500, handler404, url
from django.conf import settings
from django.contrib import admin
-
-handler500 # Pyflakes
urlpatterns = patterns(
'', |
b5ea9f83fc9422c165663920af1317365a3e6c4d | mangopaysdk/types/payinexecutiondetailsdirect.py | mangopaysdk/types/payinexecutiondetailsdirect.py | from mangopaysdk.types.payinexecutiondetails import PayInExecutionDetails
class PayInExecutionDetailsDirect(PayInExecutionDetails):
def __init__(self):
# direct card
self.CardId = None
self.SecureModeReturnURL = None
# Mode3DSType { DEFAULT, FORCE }
self.SecureMode = None | from mangopaysdk.types.payinexecutiondetails import PayInExecutionDetails
class PayInExecutionDetailsDirect(PayInExecutionDetails):
def __init__(self):
# direct card
self.CardId = None
self.SecureModeReturnURL = None
self.SecureModeRedirectURL = None
# Mode3DSType { DEFAULT, FORCE }
self.SecureMode = None
| Add SecureModeRedirectURL attribute to PayInExecutionDetailsDirect | Add SecureModeRedirectURL attribute to PayInExecutionDetailsDirect
| Python | mit | Mangopay/mangopay2-python-sdk,chocopoche/mangopay2-python-sdk | ---
+++
@@ -7,5 +7,6 @@
# direct card
self.CardId = None
self.SecureModeReturnURL = None
+ self.SecureModeRedirectURL = None
# Mode3DSType { DEFAULT, FORCE }
self.SecureMode = None |
c1fd0f12810be544d12d4bea8ccd0ce9f8a190cc | jupyterlab/labhubapp.py | jupyterlab/labhubapp.py | from .labapp import LabApp
try:
from jupyterhub.singleuser import SingleUserNotebookApp
except ImportError:
SingleUserLabApp = None
raise ImportError('You must have jupyterhub installed for this to work.')
else:
class SingleUserLabApp(SingleUserNotebookApp, LabApp):
def init_webapp(self, *args, **kwargs):
super().init_webapp(*args, **kwargs)
settings = self.web_app.settings
if 'page_config_data' not in settings:
settings['page_config_data'] = {}
settings['page_config_data']['hub_prefix'] = self.hub_prefix
settings['page_config_data']['hub_host'] = self.hub_host
def main(argv=None):
return SingleUserLabApp.launch_instance(argv)
if __name__ == "__main__":
main()
| from .labapp import LabApp
try:
from jupyterhub.singleuser import SingleUserNotebookApp
except ImportError:
SingleUserLabApp = None
raise ImportError('You must have jupyterhub installed for this to work.')
else:
class SingleUserLabApp(SingleUserNotebookApp, LabApp):
def init_webapp(self, *args, **kwargs):
super().init_webapp(*args, **kwargs)
settings = self.web_app.settings
if 'page_config_data' not in settings:
settings['page_config_data'] = {}
settings['page_config_data']['hub_prefix'] = self.hub_prefix
settings['page_config_data']['hub_host'] = self.hub_host
settings['page_config_data']['hub_user'] = self.user
def main(argv=None):
return SingleUserLabApp.launch_instance(argv)
if __name__ == "__main__":
main()
| Add hub user info to page | Add hub user info to page
| Python | bsd-3-clause | jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab | ---
+++
@@ -14,6 +14,7 @@
settings['page_config_data'] = {}
settings['page_config_data']['hub_prefix'] = self.hub_prefix
settings['page_config_data']['hub_host'] = self.hub_host
+ settings['page_config_data']['hub_user'] = self.user
def main(argv=None): |
ab79661216ff972bd696eb568c68ebd221c9a003 | seabird/modules/url.py | seabird/modules/url.py | import asyncio
import re
import aiohttp
import lxml.html
from seabird.plugin import Plugin
class URLPlugin(Plugin):
url_regex = re.compile(r'https?://[^ ]+')
def irc_privmsg(self, msg):
for match in URLPlugin.url_regex.finditer(msg.trailing):
url = match.group(0)
# As a fallback, use our own internal URL handler
if True:
loop = asyncio.get_event_loop()
loop.create_task(self.url_callback(msg, url))
async def url_callback(self, msg, url):
async with aiohttp.get(url) as resp:
# Read up to 1m
data = await resp.content.read(1024*1024)
if not data:
return
tree = lxml.html.fromstring(data)
title = tree.find(".//title")
if title is None:
return
self.bot.reply(msg, 'Title: {}'.format(title.text))
| import asyncio
import re
import aiohttp
import lxml.html
from seabird.plugin import Plugin
class URLMixin:
"""Simple marker class to mark a plugin as a url plugin
A URL plugin requires only one thing:
- A method named url_match which takes a msg and url as an argument and
returns True if the url matches this plugin.
Note that callback functions are not required to be coroutines in case they
need to access data from other plugins, but most should have a background
task as almost every one will need to do some form of background processing
or data transfer.
"""
def url_match(self, msg, url):
raise NotImplementedError
class URLPlugin(Plugin):
url_regex = re.compile(r'https?://[^ ]+')
def irc_privmsg(self, msg):
for match in URLPlugin.url_regex.finditer(msg.trailing):
url = match.group(0)
matching_plugin = False
for plugin in self.bot.plugins:
if isinstance(plugin, URLMixin) and plugin.url_match(msg, url):
matching_plugin = True
# As a fallback, use our own internal URL handler
if not matching_plugin:
loop = asyncio.get_event_loop()
loop.create_task(self.url_callback(msg, url))
async def url_callback(self, msg, url):
async with aiohttp.get(url) as resp:
# Read up to 1m
data = await resp.content.read(1024*1024)
if not data:
return
tree = lxml.html.fromstring(data)
title = tree.find(".//title")
if title is None:
return
self.bot.reply(msg, 'Title: {}'.format(title.text))
| Add a method for plugins to add their own URL handlers | Add a method for plugins to add their own URL handlers
| Python | mit | belak/pyseabird,belak/python-seabird | ---
+++
@@ -7,6 +7,22 @@
from seabird.plugin import Plugin
+class URLMixin:
+ """Simple marker class to mark a plugin as a url plugin
+
+ A URL plugin requires only one thing:
+ - A method named url_match which takes a msg and url as an argument and
+ returns True if the url matches this plugin.
+
+ Note that callback functions are not required to be coroutines in case they
+ need to access data from other plugins, but most should have a background
+ task as almost every one will need to do some form of background processing
+ or data transfer.
+ """
+ def url_match(self, msg, url):
+ raise NotImplementedError
+
+
class URLPlugin(Plugin):
url_regex = re.compile(r'https?://[^ ]+')
@@ -14,8 +30,13 @@
for match in URLPlugin.url_regex.finditer(msg.trailing):
url = match.group(0)
+ matching_plugin = False
+ for plugin in self.bot.plugins:
+ if isinstance(plugin, URLMixin) and plugin.url_match(msg, url):
+ matching_plugin = True
+
# As a fallback, use our own internal URL handler
- if True:
+ if not matching_plugin:
loop = asyncio.get_event_loop()
loop.create_task(self.url_callback(msg, url))
|
8e9dc62f01f4b6ccaab819d21d92f3d6c53e3e1c | src/common/constants.py | src/common/constants.py | """
Constants used to make the VWS mock and wrapper.
"""
from constantly import ValueConstant, Values
class ResultCodes(Values):
"""
Constants representing various VWS result codes.
"""
AUTHENTICATION_FAILURE = ValueConstant('AuthenticationFailure')
SUCCESS = ValueConstant('Success')
FAIL = ValueConstant('Fail')
REQUEST_TIME_TOO_SKEWED = ValueConstant('RequestTimeTooSkewed')
| """
Constants used to make the VWS mock and wrapper.
"""
from constantly import ValueConstant, Values
class ResultCodes(Values):
"""
Constants representing various VWS result codes.
See
https://library.vuforia.com/articles/Solution/How-To-Interperete-VWS-API-Result-Codes
"""
SUCCESS = ValueConstant('Success')
TARGET_CREATED = ValueConstant('Created')
AUTHENTICATION_FAILURE = ValueConstant('AuthenticationFailure')
REQUEST_TIME_TOO_SKEWED = ValueConstant('RequestTimeTooSkewed')
TARGET_NAME_EXIST = ValueConstant('TargetNameExist')
UNKNOWN_TARGET = ValueConstant('UnknownTarget')
BAD_IMAGE = ValueConstant('BadImage')
IMAGE_TOO_LARGE = ValueConstant('ImageTooLarge')
METADATA_TOO_LARGE = ValueConstant('MetadataTooLarge')
DATE_RANGE_ERROR = ValueConstant('DateRangeError')
FAIL = ValueConstant('Fail')
| Add all documented result codes | Add all documented result codes
| Python | mit | adamtheturtle/vws-python,adamtheturtle/vws-python | ---
+++
@@ -8,9 +8,19 @@
class ResultCodes(Values):
"""
Constants representing various VWS result codes.
+
+ See
+ https://library.vuforia.com/articles/Solution/How-To-Interperete-VWS-API-Result-Codes
"""
+ SUCCESS = ValueConstant('Success')
+ TARGET_CREATED = ValueConstant('Created')
AUTHENTICATION_FAILURE = ValueConstant('AuthenticationFailure')
- SUCCESS = ValueConstant('Success')
+ REQUEST_TIME_TOO_SKEWED = ValueConstant('RequestTimeTooSkewed')
+ TARGET_NAME_EXIST = ValueConstant('TargetNameExist')
+ UNKNOWN_TARGET = ValueConstant('UnknownTarget')
+ BAD_IMAGE = ValueConstant('BadImage')
+ IMAGE_TOO_LARGE = ValueConstant('ImageTooLarge')
+ METADATA_TOO_LARGE = ValueConstant('MetadataTooLarge')
+ DATE_RANGE_ERROR = ValueConstant('DateRangeError')
FAIL = ValueConstant('Fail')
- REQUEST_TIME_TOO_SKEWED = ValueConstant('RequestTimeTooSkewed') |
7ae4954a40b5b143cadff917456ba67c2653bdb8 | asyncmailer/tests/settings.py | asyncmailer/tests/settings.py | """
These settings are used by the ``manage.py`` command.
With normal tests we want to use the fastest possible way which is an
in-memory sqlite database but if you want to create South migrations you
need a persistant database.
Unfortunately there seems to be an issue with either South or syncdb so that
defining two routers ("default" and "south") does not work.
"""
from distutils.version import StrictVersion
import django
from .test_settings import * # NOQA
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db.sqlite',
}
}
django_version = django.get_version()
if StrictVersion(django_version) < StrictVersion('1.7'):
INSTALLED_APPS.append('south', )
| """
These settings are used by the ``manage.py`` command.
With normal tests we want to use the fastest possible way which is an
in-memory sqlite database but if you want to create South migrations you
need a persistant database.
Unfortunately there seems to be an issue with either South or syncdb so that
defining two routers ("default" and "south") does not work.
"""
from distutils.version import StrictVersion
import django
from .test_settings import * # NOQA
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db.sqlite',
}
}
django_version = django.get_version()
if StrictVersion(django_version) < StrictVersion('1.7'):
INSTALLED_APPS.append('south', ) # NOQA
| Add NOQA to make flake8 happy | Add NOQA to make flake8 happy
| Python | mit | andyfangdz/django-asyncmailer,andyfangdz/django-asyncmailer | ---
+++
@@ -25,4 +25,4 @@
django_version = django.get_version()
if StrictVersion(django_version) < StrictVersion('1.7'):
- INSTALLED_APPS.append('south', )
+ INSTALLED_APPS.append('south', ) # NOQA |
c1ac7c357d5a7ce3e96af9b4356fc2f0493e2b1d | apps/people/admin.py | apps/people/admin.py | from cms.admin import PageBaseAdmin, SearchMetaBaseAdmin
from django.contrib import admin
from .models import Person, Team
@admin.register(Person)
class PersonAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {"url_title": ("first_name", "last_name",)}
filter_horizontal = ("teams",)
fieldsets = (
(None, {
"fields": (
"page",
)
}),
('Name information', {
'fields': (
"title",
"first_name",
"middle_name",
"last_name",
"url_title",
)
}),
('Additional information', {
'fields': (
"photo",
"job_title",
"bio",
"teams",
"order",
)
}),
('Contact details', {
'fields': (
"email",
"linkedin_username",
"skype_username",
"twitter_username",
)
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
@admin.register(Team)
class TeamAdmin(PageBaseAdmin):
prepopulated_fields = {"url_title": ("title",)}
fieldsets = (
PageBaseAdmin.TITLE_FIELDS,
("Content", {
"fields": ("content_primary",),
}),
PageBaseAdmin.PUBLICATION_FIELDS,
PageBaseAdmin.NAVIGATION_FIELDS,
PageBaseAdmin.SEO_FIELDS,
)
| from cms.admin import PageBaseAdmin, SearchMetaBaseAdmin
from django.contrib import admin
from .models import Person, Team
@admin.register(Person)
class PersonAdmin(SearchMetaBaseAdmin):
prepopulated_fields = {"url_title": ("first_name", "last_name",)}
filter_horizontal = ("teams",)
fieldsets = (
(None, {
"fields": (
"page",
)
}),
('Name information', {
'fields': (
"title",
"first_name",
"middle_name",
"last_name",
"url_title",
)
}),
('Additional information', {
'fields': (
"photo",
"job_title",
"bio",
"teams",
"order",
)
}),
('Contact details', {
'fields': (
"email",
"linkedin_username",
"skype_username",
"twitter_username",
)
}),
SearchMetaBaseAdmin.PUBLICATION_FIELDS,
SearchMetaBaseAdmin.SEO_FIELDS,
)
@admin.register(Team)
class TeamAdmin(PageBaseAdmin):
prepopulated_fields = {
"slug": ("title",)
}
fieldsets = (
PageBaseAdmin.TITLE_FIELDS,
("Content", {
"fields": ("content_primary",),
}),
PageBaseAdmin.PUBLICATION_FIELDS,
PageBaseAdmin.NAVIGATION_FIELDS,
PageBaseAdmin.SEO_FIELDS,
)
| Fix usage of `url_title` in TeamAdmin. | Fix usage of `url_title` in TeamAdmin.
| Python | mit | onespacemedia/cms-people,onespacemedia/cms-people | ---
+++
@@ -50,7 +50,9 @@
@admin.register(Team)
class TeamAdmin(PageBaseAdmin):
- prepopulated_fields = {"url_title": ("title",)}
+ prepopulated_fields = {
+ "slug": ("title",)
+ }
fieldsets = (
PageBaseAdmin.TITLE_FIELDS, |
42a523393d6ec2d5dfc80d1b82e2c703e1afa29b | calc.py | calc.py | """calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
elif command == 'min':
print(min(nums))
else:
usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]"
print(usage)
| """calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
elif command == 'min':
print(min(nums))
else:
usage = ("calc.py [add|multiply|min]"
" NUM1 [NUM2 [NUM3 [...]]]")
print(usage)
| Update usage string for min | Update usage string for min
| Python | bsd-3-clause | mkuiper/calc-1 | ---
+++
@@ -18,5 +18,6 @@
elif command == 'min':
print(min(nums))
else:
- usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]"
+ usage = ("calc.py [add|multiply|min]"
+ " NUM1 [NUM2 [NUM3 [...]]]")
print(usage) |
1cd930883b4168f99da0a16d95f370b001126134 | src/reduce_framerate.py | src/reduce_framerate.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_color framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class FramerateReducer(object):
"""
Reduces the framerate of a video feed to one fifteenth of the original.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_color",
Image, self.frame_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/ardrone/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_color")
self.count = 0
def frame_callback(self, frame):
"""
Publish at a reduced rate.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""
Main entry point for script.
"""
rospy.init_node("framerate_reducer", anonymous=True)
FramerateReducer()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_color framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class FramerateReducer(object):
"""
Reduces the framerate of a video feed to one fifteenth of the original.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_color",
Image, self.frame_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/ardrone/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_color")
self.count = 0
def frame_callback(self, frame):
"""
Publish at a reduced rate.
"""
# Publish every fifteenth frame
if not self.count % 3:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""
Main entry point for script.
"""
rospy.init_node("framerate_reducer", anonymous=True)
FramerateReducer()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
| Reduce frequency to 10 Hz instead of 2. | Reduce frequency to 10 Hz instead of 2.
| Python | mit | masasin/spirit,masasin/spirit | ---
+++
@@ -30,7 +30,7 @@
"""
# Publish every fifteenth frame
- if not self.count % 15:
+ if not self.count % 3:
self.image_publisher.publish(frame)
self.count += 1
|
28e95a2aad1efc3fa409b273b74e5173b5eba1ad | conjureup/ui/__init__.py | conjureup/ui/__init__.py | from ubuntui.frame import Frame # noqa
from ubuntui.views import ErrorView
from conjureup import async
from conjureup.app_config import app
from conjureup.ui.views.shutdown import ShutdownView
from ubuntui.ev import EventLoop
class ConjureUI(Frame):
def show_exception_message(self, ex):
errmsg = str(ex)
errmsg += ("\n\n"
"Review log messages at ~/.cache/conjure-up/conjure-up.log "
"If appropriate, please submit a bug here: "
"https://github.com/conjure-up/conjure-up/issues/new")
async.shutdown()
EventLoop.remove_alarms()
self.frame.body = ErrorView(errmsg)
app.log.debug("Showing dialog for exception: {}".format(ex))
def show_error_message(self, msg):
self.frame.body = ErrorView(msg)
def show_shutdown_message(self):
self.frame.body = ShutdownView()
| from ubuntui.frame import Frame # noqa
from ubuntui.views import ErrorView
from conjureup import async
from conjureup.app_config import app
from conjureup.ui.views.shutdown import ShutdownView
from ubuntui.ev import EventLoop
from pathlib import Path
class ConjureUI(Frame):
def show_exception_message(self, ex):
_cache_dir = Path(app.argv.cache_dir) / 'conjure-up.log'
errmsg = str(ex)
errmsg += (
"\n\n Review log messages at {} "
"If appropriate, please submit a bug here: "
"https://github.com/conjure-up/conjure-up/issues/new".format(
_cache_dir))
async.shutdown()
EventLoop.remove_alarms()
self.frame.body = ErrorView(errmsg)
app.log.debug("Showing dialog for exception: {}".format(ex))
def show_error_message(self, msg):
self.frame.body = ErrorView(msg)
def show_shutdown_message(self):
self.frame.body = ShutdownView()
| Use proper cache directory in error view | Use proper cache directory in error view
Fixes #1254
Signed-off-by: Adam Stokes <49c255c1d074742f60d19fdba5e2aa5a34add567@users.noreply.github.com>
| Python | mit | conjure-up/conjure-up,ubuntu/conjure-up,ubuntu/conjure-up,Ubuntu-Solutions-Engineering/conjure,Ubuntu-Solutions-Engineering/conjure,conjure-up/conjure-up | ---
+++
@@ -4,16 +4,19 @@
from conjureup.app_config import app
from conjureup.ui.views.shutdown import ShutdownView
from ubuntui.ev import EventLoop
+from pathlib import Path
class ConjureUI(Frame):
def show_exception_message(self, ex):
+ _cache_dir = Path(app.argv.cache_dir) / 'conjure-up.log'
errmsg = str(ex)
- errmsg += ("\n\n"
- "Review log messages at ~/.cache/conjure-up/conjure-up.log "
- "If appropriate, please submit a bug here: "
- "https://github.com/conjure-up/conjure-up/issues/new")
+ errmsg += (
+ "\n\n Review log messages at {} "
+ "If appropriate, please submit a bug here: "
+ "https://github.com/conjure-up/conjure-up/issues/new".format(
+ _cache_dir))
async.shutdown()
EventLoop.remove_alarms() |
ff4e92fc75392a5c3ed2a91591369046ba59f2a3 | ambassador/tests/test_ambassador.py | ambassador/tests/test_ambassador.py | from kat.harness import Runner
from abstract_tests import AmbassadorTest
# Import all the real tests from other files, to make it easier to pick and choose during development.
import t_basics
import t_extauth
import t_grpc
import t_grpc_bridge
import t_grpc_web
import t_gzip
import t_headerrouting
import t_loadbalancer
import t_lua_scripts
import t_mappingtests
import t_optiontests
import t_plain
import t_ratelimit
import t_redirect
import t_shadow
import t_stats
import t_tcpmapping
import t_tls
import t_tracing
import t_retrypolicy
import t_consul
import t_circuitbreaker
import t_knative
# pytest will find this because Runner is a toplevel callable object in a file
# that pytest is willing to look inside.
#
# Also note:
# - Runner(cls) will look for variants of _every subclass_ of cls.
# - Any class you pass to Runner needs to be standalone (it must have its
# own manifests and be able to set up its own world).
main = Runner(AmbassadorTest)
| from kat.harness import Runner
from abstract_tests import AmbassadorTest
# Import all the real tests from other files, to make it easier to pick and choose during development.
import t_basics
import t_extauth
import t_grpc
import t_grpc_bridge
import t_grpc_web
import t_gzip
import t_headerrouting
import t_loadbalancer
import t_lua_scripts
import t_mappingtests
import t_optiontests
import t_plain
import t_ratelimit
import t_redirect
import t_shadow
import t_stats
import t_tcpmapping
# import t_tls
import t_tracing
import t_retrypolicy
import t_consul
import t_circuitbreaker
import t_knative
# pytest will find this because Runner is a toplevel callable object in a file
# that pytest is willing to look inside.
#
# Also note:
# - Runner(cls) will look for variants of _every subclass_ of cls.
# - Any class you pass to Runner needs to be standalone (it must have its
# own manifests and be able to set up its own world).
main = Runner(AmbassadorTest)
| Drop the TLS tests for a moment to see if we can get a success for timing. | Drop the TLS tests for a moment to see if we can get a success for timing.
| Python | apache-2.0 | datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador | ---
+++
@@ -21,7 +21,7 @@
import t_shadow
import t_stats
import t_tcpmapping
-import t_tls
+# import t_tls
import t_tracing
import t_retrypolicy
import t_consul |
990158b8fd0407129f776b6d28989b7717c331f5 | compose_mode/__init__.py | compose_mode/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '0.4.6'
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '0.5.0'
| Bump minor version to 0.5.0 | Bump minor version to 0.5.0
| Python | mit | KitB/compose-mode | ---
+++
@@ -1,4 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-__version__ = '0.4.6'
+__version__ = '0.5.0' |
d8913869c466bea4e301e8502decdc01f6d9987e | cowserver.py | cowserver.py | from flask import Flask
from flask_talisman import Talisman
from flask_seasurf import SeaSurf
from redditflair.redditflair import redditflair, limiter
from redissession import RedisSessionInterface
from database import db, User, Specials
import os.path
content_security_policy = {
'script-src': '\'unsafe-inline\'',
'style-src': '\'self\''
}
def setupApp():
app = Flask(__name__)
# HTTP security headers
Talisman(app, content_security_policy=content_security_policy)
# CSRF library
SeaSurf(app)
# Limiter
limiter.init_app(app)
# SQLAlchemy
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///cowserver.db'
db.init_app(app)
# Blueprints
app.register_blueprint(redditflair)
# Redis Session Interface
app.session_interface = RedisSessionInterface()
return app
def setupDatabase():
if not os.path.isfile('cowserver.db'):
with app.app_context():
db.create_all()
app = setupApp()
setupDatabase()
if __name__ == "__main__":
app.run(host='0.0.0.0')
| from flask import Flask
from flask_talisman import Talisman
from flask_seasurf import SeaSurf
from redditflair.redditflair import redditflair, limiter
from redissession import RedisSessionInterface
from database import db, User, Specials
import os.path
content_security_policy = {
'script-src': '\'unsafe-inline\'',
'style-src': '\'self\''
}
def setupApp():
app = Flask(__name__)
# HTTP security headers
Talisman(app, content_security_policy=content_security_policy)
# CSRF library
SeaSurf(app)
# Limiter
limiter.init_app(app)
# SQLAlchemy
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///cowserver.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
# Blueprints
app.register_blueprint(redditflair)
# Redis Session Interface
app.session_interface = RedisSessionInterface()
return app
def setupDatabase():
if not os.path.isfile('cowserver.db'):
with app.app_context():
db.create_all()
app = setupApp()
setupDatabase()
if __name__ == "__main__":
app.run(host='0.0.0.0')
| Disable SQLALCHEMY_TRACK_MODIFICATIONS to fix deprecation warning and meet recommendation | Disable SQLALCHEMY_TRACK_MODIFICATIONS to fix deprecation warning and meet recommendation
| Python | mit | competitiveoverwatch/RankVerification,competitiveoverwatch/RankVerification | ---
+++
@@ -13,19 +13,28 @@
def setupApp():
app = Flask(__name__)
+
# HTTP security headers
Talisman(app, content_security_policy=content_security_policy)
+
# CSRF library
SeaSurf(app)
+
# Limiter
limiter.init_app(app)
+
# SQLAlchemy
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///cowserver.db'
+ app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
+
db.init_app(app)
+
# Blueprints
app.register_blueprint(redditflair)
+
# Redis Session Interface
app.session_interface = RedisSessionInterface()
+
return app
def setupDatabase(): |
d9ef4c798e50de12cc4ab41fa470cd0e04d77322 | opendebates/tests/test_context_processors.py | opendebates/tests/test_context_processors.py | import urlparse
from django.test import TestCase, override_settings
from django.conf import settings
from mock import patch, Mock
from opendebates.context_processors import global_vars
from opendebates.tests.factories import SubmissionFactory
class NumberOfVotesTest(TestCase):
def test_number_of_votes(self):
mock_request = Mock()
with patch('opendebates.utils.cache') as mock_cache:
mock_cache.get.return_value = 2
context = global_vars(mock_request)
self.assertEqual(2, int(context['NUMBER_OF_VOTES']))
class ThemeTests(TestCase):
def setUp(self):
self.idea = SubmissionFactory()
@override_settings(SITE_THEME={'HASHTAG': 'TestHashtag'})
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
self.assertTrue('#TestHashtag' in fields['subject'][0], fields['subject'][0])
| import urlparse
from django.test import TestCase, override_settings
from mock import patch, Mock
from opendebates.context_processors import global_vars
from opendebates.tests.factories import SubmissionFactory
class NumberOfVotesTest(TestCase):
def test_number_of_votes(self):
mock_request = Mock()
with patch('opendebates.utils.cache') as mock_cache:
mock_cache.get.return_value = 2
context = global_vars(mock_request)
self.assertEqual(2, int(context['NUMBER_OF_VOTES']))
class ThemeTests(TestCase):
def setUp(self):
self.idea = SubmissionFactory()
@override_settings(SITE_THEME={'HASHTAG': 'TestHashtag'})
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
self.assertTrue('#TestHashtag' in fields['subject'][0], fields['subject'][0])
| Remove unused import in test | Remove unused import in test
| Python | apache-2.0 | ejucovy/django-opendebates,ejucovy/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,ejucovy/django-opendebates,ejucovy/django-opendebates | ---
+++
@@ -1,7 +1,6 @@
import urlparse
from django.test import TestCase, override_settings
-from django.conf import settings
from mock import patch, Mock
from opendebates.context_processors import global_vars |
f50b4bb49345db05d3601fc1d828d2491f902c31 | Lib/sandbox/pyem/misc.py | Lib/sandbox/pyem/misc.py | # Last Change: Sat Jun 09 07:00 PM 2007 J
#========================================================
# Constants used throughout the module (def args, etc...)
#========================================================
# This is the default dimension for representing confidence ellipses
DEF_VIS_DIM = [0, 1]
DEF_ELL_NP = 100
DEF_LEVEL = 0.39
#=====================================================================
# "magic number", that is number used to control regularization and co
# Change them at your risk !
#=====================================================================
# max deviation allowed when comparing double (this is actually stupid,
# I should actually use a number of decimals)
_MAX_DBL_DEV = 1e-10
# max conditional number allowed
_MAX_COND = 1e8
_MIN_INV_COND = 1/_MAX_COND
# Default alpha for regularization
_DEF_ALPHA = 1e-1
# Default min delta for regularization
_MIN_DBL_DELTA = 1e-5
| # Last Change: Sat Jun 09 08:00 PM 2007 J
#========================================================
# Constants used throughout the module (def args, etc...)
#========================================================
# This is the default dimension for representing confidence ellipses
DEF_VIS_DIM = (0, 1)
DEF_ELL_NP = 100
DEF_LEVEL = 0.39
#=====================================================================
# "magic number", that is number used to control regularization and co
# Change them at your risk !
#=====================================================================
# max deviation allowed when comparing double (this is actually stupid,
# I should actually use a number of decimals)
_MAX_DBL_DEV = 1e-10
# max conditional number allowed
_MAX_COND = 1e8
_MIN_INV_COND = 1/_MAX_COND
# Default alpha for regularization
_DEF_ALPHA = 1e-1
# Default min delta for regularization
_MIN_DBL_DELTA = 1e-5
| Set def arguments to immutable to avoid nasty side effect. | Set def arguments to immutable to avoid nasty side effect.
| Python | bsd-3-clause | hainm/scipy,rgommers/scipy,efiring/scipy,surhudm/scipy,rmcgibbo/scipy,dch312/scipy,Stefan-Endres/scipy,jakevdp/scipy,andyfaff/scipy,mikebenfield/scipy,anntzer/scipy,giorgiop/scipy,sargas/scipy,e-q/scipy,dominicelse/scipy,bkendzior/scipy,Shaswat27/scipy,ndchorley/scipy,haudren/scipy,aarchiba/scipy,grlee77/scipy,person142/scipy,jor-/scipy,kleskjr/scipy,zxsted/scipy,richardotis/scipy,gdooper/scipy,mtrbean/scipy,Eric89GXL/scipy,befelix/scipy,befelix/scipy,vanpact/scipy,FRidh/scipy,pizzathief/scipy,rgommers/scipy,andyfaff/scipy,jamestwebber/scipy,surhudm/scipy,jjhelmus/scipy,pnedunuri/scipy,Gillu13/scipy,pbrod/scipy,anntzer/scipy,piyush0609/scipy,rmcgibbo/scipy,ndchorley/scipy,gef756/scipy,WillieMaddox/scipy,Shaswat27/scipy,andim/scipy,richardotis/scipy,mingwpy/scipy,ales-erjavec/scipy,anntzer/scipy,matthew-brett/scipy,jamestwebber/scipy,sriki18/scipy,gfyoung/scipy,endolith/scipy,Gillu13/scipy,nvoron23/scipy,minhlongdo/scipy,gdooper/scipy,ales-erjavec/scipy,jjhelmus/scipy,jonycgn/scipy,larsmans/scipy,newemailjdm/scipy,lukauskas/scipy,arokem/scipy,ChanderG/scipy,e-q/scipy,cpaulik/scipy,ndchorley/scipy,juliantaylor/scipy,mortada/scipy,person142/scipy,e-q/scipy,Shaswat27/scipy,mhogg/scipy,aeklant/scipy,maciejkula/scipy,josephcslater/scipy,raoulbq/scipy,ChanderG/scipy,sargas/scipy,vhaasteren/scipy,efiring/scipy,mingwpy/scipy,sriki18/scipy,arokem/scipy,ilayn/scipy,bkendzior/scipy,lhilt/scipy,ortylp/scipy,gfyoung/scipy,Dapid/scipy,ndchorley/scipy,josephcslater/scipy,rmcgibbo/scipy,Newman101/scipy,fredrikw/scipy,WillieMaddox/scipy,grlee77/scipy,nmayorov/scipy,perimosocordiae/scipy,jonycgn/scipy,mikebenfield/scipy,petebachant/scipy,mikebenfield/scipy,gdooper/scipy,mortonjt/scipy,maniteja123/scipy,mgaitan/scipy,rgommers/scipy,ales-erjavec/scipy,haudren/scipy,zxsted/scipy,lukauskas/scipy,tylerjereddy/scipy,nvoron23/scipy,lhilt/scipy,vigna/scipy,jseabold/scipy,behzadnouri/scipy,aeklant/scipy,gertingold/scipy,dch312/scipy,aeklant/scipy,pschella/scipy,Newman
101/scipy,pbrod/scipy,maniteja123/scipy,ortylp/scipy,mtrbean/scipy,Shaswat27/scipy,ortylp/scipy,vanpact/scipy,kalvdans/scipy,njwilson23/scipy,gdooper/scipy,richardotis/scipy,vanpact/scipy,mhogg/scipy,chatcannon/scipy,ChanderG/scipy,surhudm/scipy,fernand/scipy,mdhaber/scipy,matthew-brett/scipy,mikebenfield/scipy,futurulus/scipy,dch312/scipy,fernand/scipy,fernand/scipy,zxsted/scipy,jsilter/scipy,cpaulik/scipy,raoulbq/scipy,piyush0609/scipy,person142/scipy,chatcannon/scipy,FRidh/scipy,scipy/scipy,zaxliu/scipy,Kamp9/scipy,mtrbean/scipy,gef756/scipy,Kamp9/scipy,endolith/scipy,matthewalbani/scipy,aman-iitj/scipy,Kamp9/scipy,zaxliu/scipy,anielsen001/scipy,surhudm/scipy,vhaasteren/scipy,chatcannon/scipy,scipy/scipy,vanpact/scipy,perimosocordiae/scipy,sargas/scipy,pbrod/scipy,mortada/scipy,sonnyhu/scipy,felipebetancur/scipy,Kamp9/scipy,vberaudi/scipy,bkendzior/scipy,giorgiop/scipy,nonhermitian/scipy,mingwpy/scipy,vberaudi/scipy,felipebetancur/scipy,mortada/scipy,kalvdans/scipy,futurulus/scipy,gef756/scipy,mhogg/scipy,jamestwebber/scipy,person142/scipy,mortonjt/scipy,larsmans/scipy,josephcslater/scipy,aarchiba/scipy,sonnyhu/scipy,mdhaber/scipy,witcxc/scipy,trankmichael/scipy,petebachant/scipy,futurulus/scipy,ortylp/scipy,sauliusl/scipy,dch312/scipy,jjhelmus/scipy,ilayn/scipy,witcxc/scipy,aarchiba/scipy,mhogg/scipy,jseabold/scipy,matthew-brett/scipy,aman-iitj/scipy,gef756/scipy,mingwpy/scipy,aeklant/scipy,ogrisel/scipy,haudren/scipy,ilayn/scipy,Stefan-Endres/scipy,minhlongdo/scipy,maniteja123/scipy,vanpact/scipy,sriki18/scipy,anielsen001/scipy,andyfaff/scipy,apbard/scipy,sonnyhu/scipy,jonycgn/scipy,mortonjt/scipy,niknow/scipy,vhaasteren/scipy,behzadnouri/scipy,WillieMaddox/scipy,nonhermitian/scipy,befelix/scipy,vigna/scipy,mingwpy/scipy,njwilson23/scipy,petebachant/scipy,zerothi/scipy,njwilson23/scipy,Shaswat27/scipy,witcxc/scipy,sauliusl/scipy,Srisai85/scipy,nvoron23/scipy,larsmans/scipy,Newman101/scipy,FRidh/scipy,maniteja123/scipy,anielsen001/scipy,WarrenWeckesser/scipy,Ste
fan-Endres/scipy,juliantaylor/scipy,grlee77/scipy,behzadnouri/scipy,maciejkula/scipy,niknow/scipy,jakevdp/scipy,person142/scipy,newemailjdm/scipy,mgaitan/scipy,apbard/scipy,lhilt/scipy,matthew-brett/scipy,dominicelse/scipy,hainm/scipy,niknow/scipy,maniteja123/scipy,pyramania/scipy,scipy/scipy,zxsted/scipy,ales-erjavec/scipy,minhlongdo/scipy,gertingold/scipy,kleskjr/scipy,fredrikw/scipy,ndchorley/scipy,jsilter/scipy,surhudm/scipy,dominicelse/scipy,perimosocordiae/scipy,WarrenWeckesser/scipy,Newman101/scipy,zxsted/scipy,perimosocordiae/scipy,mhogg/scipy,perimosocordiae/scipy,haudren/scipy,endolith/scipy,kleskjr/scipy,woodscn/scipy,newemailjdm/scipy,jor-/scipy,juliantaylor/scipy,kalvdans/scipy,efiring/scipy,perimosocordiae/scipy,njwilson23/scipy,nmayorov/scipy,fernand/scipy,aarchiba/scipy,hainm/scipy,jor-/scipy,gef756/scipy,nmayorov/scipy,gertingold/scipy,kalvdans/scipy,ogrisel/scipy,scipy/scipy,jamestwebber/scipy,andim/scipy,njwilson23/scipy,zaxliu/scipy,zaxliu/scipy,newemailjdm/scipy,gertingold/scipy,rmcgibbo/scipy,cpaulik/scipy,cpaulik/scipy,josephcslater/scipy,WillieMaddox/scipy,pyramania/scipy,mtrbean/scipy,mortada/scipy,jonycgn/scipy,vberaudi/scipy,woodscn/scipy,sauliusl/scipy,Eric89GXL/scipy,niknow/scipy,kleskjr/scipy,aman-iitj/scipy,Gillu13/scipy,sriki18/scipy,ilayn/scipy,mikebenfield/scipy,vanpact/scipy,newemailjdm/scipy,FRidh/scipy,ales-erjavec/scipy,mdhaber/scipy,Eric89GXL/scipy,e-q/scipy,anntzer/scipy,teoliphant/scipy,Srisai85/scipy,pyramania/scipy,arokem/scipy,ilayn/scipy,apbard/scipy,apbard/scipy,minhlongdo/scipy,matthew-brett/scipy,newemailjdm/scipy,jseabold/scipy,argriffing/scipy,mdhaber/scipy,mgaitan/scipy,ogrisel/scipy,fernand/scipy,trankmichael/scipy,fredrikw/scipy,Newman101/scipy,niknow/scipy,argriffing/scipy,endolith/scipy,WillieMaddox/scipy,jakevdp/scipy,WillieMaddox/scipy,pizzathief/scipy,lukauskas/scipy,gertingold/scipy,arokem/scipy,Srisai85/scipy,hainm/scipy,anielsen001/scipy,grlee77/scipy,sonnyhu/scipy,piyush0609/scipy,gfyoung/scipy,giorgiop/s
cipy,trankmichael/scipy,gfyoung/scipy,nvoron23/scipy,efiring/scipy,jsilter/scipy,nonhermitian/scipy,anntzer/scipy,jsilter/scipy,dominicelse/scipy,Dapid/scipy,pschella/scipy,e-q/scipy,dch312/scipy,matthewalbani/scipy,zerothi/scipy,jsilter/scipy,minhlongdo/scipy,trankmichael/scipy,apbard/scipy,teoliphant/scipy,felipebetancur/scipy,sauliusl/scipy,chatcannon/scipy,andim/scipy,felipebetancur/scipy,andyfaff/scipy,aman-iitj/scipy,trankmichael/scipy,anielsen001/scipy,Shaswat27/scipy,ales-erjavec/scipy,juliantaylor/scipy,njwilson23/scipy,cpaulik/scipy,mortonjt/scipy,ilayn/scipy,andim/scipy,mingwpy/scipy,vigna/scipy,futurulus/scipy,Stefan-Endres/scipy,ogrisel/scipy,pschella/scipy,aeklant/scipy,Eric89GXL/scipy,larsmans/scipy,piyush0609/scipy,andyfaff/scipy,pschella/scipy,cpaulik/scipy,Eric89GXL/scipy,haudren/scipy,rmcgibbo/scipy,giorgiop/scipy,ortylp/scipy,Gillu13/scipy,jjhelmus/scipy,lukauskas/scipy,zerothi/scipy,kalvdans/scipy,teoliphant/scipy,WarrenWeckesser/scipy,ogrisel/scipy,zaxliu/scipy,Stefan-Endres/scipy,matthewalbani/scipy,matthewalbani/scipy,maniteja123/scipy,rgommers/scipy,woodscn/scipy,ChanderG/scipy,endolith/scipy,andim/scipy,pizzathief/scipy,chatcannon/scipy,hainm/scipy,teoliphant/scipy,nonhermitian/scipy,pnedunuri/scipy,pnedunuri/scipy,mhogg/scipy,raoulbq/scipy,petebachant/scipy,argriffing/scipy,ortylp/scipy,fernand/scipy,tylerjereddy/scipy,witcxc/scipy,zxsted/scipy,josephcslater/scipy,mtrbean/scipy,FRidh/scipy,Srisai85/scipy,giorgiop/scipy,felipebetancur/scipy,petebachant/scipy,bkendzior/scipy,pschella/scipy,befelix/scipy,mdhaber/scipy,gfyoung/scipy,arokem/scipy,Gillu13/scipy,pnedunuri/scipy,rmcgibbo/scipy,maciejkula/scipy,behzadnouri/scipy,pizzathief/scipy,Dapid/scipy,mgaitan/scipy,nvoron23/scipy,behzadnouri/scipy,raoulbq/scipy,fredrikw/scipy,kleskjr/scipy,jamestwebber/scipy,vhaasteren/scipy,mdhaber/scipy,efiring/scipy,anntzer/scipy,andim/scipy,ChanderG/scipy,jonycgn/scipy,lhilt/scipy,argriffing/scipy,woodscn/scipy,felipebetancur/scipy,Gillu13/scipy,argriffin
g/scipy,jseabold/scipy,raoulbq/scipy,jor-/scipy,mtrbean/scipy,WarrenWeckesser/scipy,nmayorov/scipy,rgommers/scipy,chatcannon/scipy,jjhelmus/scipy,raoulbq/scipy,jonycgn/scipy,mortada/scipy,teoliphant/scipy,vhaasteren/scipy,befelix/scipy,mgaitan/scipy,FRidh/scipy,lukauskas/scipy,richardotis/scipy,dominicelse/scipy,Dapid/scipy,richardotis/scipy,pbrod/scipy,maciejkula/scipy,efiring/scipy,argriffing/scipy,Srisai85/scipy,fredrikw/scipy,pbrod/scipy,vigna/scipy,haudren/scipy,WarrenWeckesser/scipy,nvoron23/scipy,aman-iitj/scipy,sargas/scipy,pnedunuri/scipy,piyush0609/scipy,Kamp9/scipy,behzadnouri/scipy,niknow/scipy,larsmans/scipy,jor-/scipy,andyfaff/scipy,richardotis/scipy,futurulus/scipy,vberaudi/scipy,grlee77/scipy,aarchiba/scipy,tylerjereddy/scipy,pnedunuri/scipy,Newman101/scipy,aman-iitj/scipy,Eric89GXL/scipy,piyush0609/scipy,matthewalbani/scipy,tylerjereddy/scipy,gdooper/scipy,mortonjt/scipy,giorgiop/scipy,Srisai85/scipy,zerothi/scipy,ChanderG/scipy,sriki18/scipy,mortonjt/scipy,nonhermitian/scipy,fredrikw/scipy,kleskjr/scipy,mgaitan/scipy,pizzathief/scipy,Dapid/scipy,tylerjereddy/scipy,Dapid/scipy,vberaudi/scipy,anielsen001/scipy,larsmans/scipy,jakevdp/scipy,zerothi/scipy,sriki18/scipy,jakevdp/scipy,gef756/scipy,hainm/scipy,jseabold/scipy,mortada/scipy,endolith/scipy,Kamp9/scipy,petebachant/scipy,scipy/scipy,Stefan-Endres/scipy,futurulus/scipy,juliantaylor/scipy,sonnyhu/scipy,witcxc/scipy,zerothi/scipy,vigna/scipy,vberaudi/scipy,zaxliu/scipy,trankmichael/scipy,jseabold/scipy,woodscn/scipy,pbrod/scipy,maciejkula/scipy,scipy/scipy,surhudm/scipy,WarrenWeckesser/scipy,sauliusl/scipy,lhilt/scipy,woodscn/scipy,sargas/scipy,sonnyhu/scipy,minhlongdo/scipy,bkendzior/scipy,sauliusl/scipy,pyramania/scipy,ndchorley/scipy,vhaasteren/scipy,lukauskas/scipy,nmayorov/scipy,pyramania/scipy | ---
+++
@@ -1,10 +1,10 @@
-# Last Change: Sat Jun 09 07:00 PM 2007 J
+# Last Change: Sat Jun 09 08:00 PM 2007 J
#========================================================
# Constants used throughout the module (def args, etc...)
#========================================================
# This is the default dimension for representing confidence ellipses
-DEF_VIS_DIM = [0, 1]
+DEF_VIS_DIM = (0, 1)
DEF_ELL_NP = 100
DEF_LEVEL = 0.39
#===================================================================== |
9948b5a2930cd2e6f13383bd969a33f6fc655936 | example/dlf_app/models.py | example/dlf_app/models.py | from django.db import models
from location_field.models.plain import PlainLocationField
class Place(models.Model):
parent_place = models.ForeignKey('self', null=True, blank=True)
city = models.CharField(max_length=255)
location = PlainLocationField(based_fields=[city], zoom=7)
| from django.db import models
from location_field.models.plain import PlainLocationField
class Place(models.Model):
parent_place = models.ForeignKey('self', null=True, blank=True)
city = models.CharField(max_length=255)
location = PlainLocationField(based_fields=['city'], zoom=7)
| Use field name instead of field instance | Use field name instead of field instance
| Python | mit | caioariede/django-location-field,voodmania/django-location-field,voodmania/django-location-field,caioariede/django-location-field,voodmania/django-location-field,caioariede/django-location-field | ---
+++
@@ -5,4 +5,4 @@
class Place(models.Model):
parent_place = models.ForeignKey('self', null=True, blank=True)
city = models.CharField(max_length=255)
- location = PlainLocationField(based_fields=[city], zoom=7)
+ location = PlainLocationField(based_fields=['city'], zoom=7) |
c4eef5919fa60c87b59d60c1bd005f97183ce057 | aiozk/test/test_connection.py | aiozk/test/test_connection.py | from unittest import mock
import pytest
import aiozk.connection
@pytest.fixture
def connection(event_loop):
connection = aiozk.connection.Connection(
host='zookeeper.test',
port=2181,
watch_handler=mock.MagicMock(),
read_timeout=30,
loop=mock.MagicMock(wraps=event_loop))
connection.writer = mock.MagicMock()
return connection
@pytest.mark.asyncio
async def test_close_connection_in_state_closing_do_not_performs_abort(connection):
connection.abort = mock.AsyncMock()
connection.closing = True
await connection.close(mock.ANY)
connection.abort.assert_not_awaited()
@pytest.mark.asyncio
async def test_close_cancels_read_loop_task(connection):
connection.start_read_loop()
connection.read_response = mock.AsyncMock(return_value=(0, mock.ANY, mock.ANY))
task_cancelled_future = connection.loop.create_future()
def set_result(task):
task_cancelled_future.set_result(task.cancelled())
connection.read_loop_task.add_done_callback(set_result)
await connection.close(mock.ANY)
assert await task_cancelled_future
| from unittest import mock
import pytest
import aiozk.connection
@pytest.fixture
def connection(event_loop):
connection = aiozk.connection.Connection(
host='zookeeper.test',
port=2181,
watch_handler=mock.MagicMock(),
read_timeout=30,
loop=event_loop)
connection.writer = mock.MagicMock()
return connection
@pytest.mark.asyncio
async def test_close_connection_in_state_closing_do_not_performs_abort(connection):
connection.abort = mock.AsyncMock()
connection.closing = True
await connection.close(0.1)
connection.abort.assert_not_awaited()
@pytest.mark.asyncio
async def test_close_cancels_read_loop_task(connection):
connection.read_loop_task = connection.loop.create_future()
connection.read_loop_task.done = mock.MagicMock(return_value=False)
connection.read_loop_task.cancel = mock.MagicMock(
wraps=connection.read_loop_task.cancel)
await connection.close(0.1)
connection.read_loop_task.cancel.assert_called_once()
@pytest.mark.asyncio
async def test_connection_abort(connection):
connection.pending_count = mock.MagicMock(return_value=1)
connection.abort = mock.MagicMock()
await connection.close(0.1)
connection.abort.assert_called_once()
| Modify and add tests for the revised connection.close | Modify and add tests for the revised connection.close
| Python | mit | tipsi/aiozk,tipsi/aiozk | ---
+++
@@ -12,7 +12,7 @@
port=2181,
watch_handler=mock.MagicMock(),
read_timeout=30,
- loop=mock.MagicMock(wraps=event_loop))
+ loop=event_loop)
connection.writer = mock.MagicMock()
return connection
@@ -23,22 +23,24 @@
connection.abort = mock.AsyncMock()
connection.closing = True
- await connection.close(mock.ANY)
+ await connection.close(0.1)
connection.abort.assert_not_awaited()
@pytest.mark.asyncio
async def test_close_cancels_read_loop_task(connection):
- connection.start_read_loop()
- connection.read_response = mock.AsyncMock(return_value=(0, mock.ANY, mock.ANY))
+ connection.read_loop_task = connection.loop.create_future()
+ connection.read_loop_task.done = mock.MagicMock(return_value=False)
+ connection.read_loop_task.cancel = mock.MagicMock(
+ wraps=connection.read_loop_task.cancel)
+ await connection.close(0.1)
+ connection.read_loop_task.cancel.assert_called_once()
- task_cancelled_future = connection.loop.create_future()
- def set_result(task):
- task_cancelled_future.set_result(task.cancelled())
-
- connection.read_loop_task.add_done_callback(set_result)
-
- await connection.close(mock.ANY)
- assert await task_cancelled_future
+@pytest.mark.asyncio
+async def test_connection_abort(connection):
+ connection.pending_count = mock.MagicMock(return_value=1)
+ connection.abort = mock.MagicMock()
+ await connection.close(0.1)
+ connection.abort.assert_called_once() |
23837afe465eb88ec4cb7d5bfcbc99970c417aa7 | cmsocial/db/__init__.py | cmsocial/db/__init__.py | # -*- coding: utf-8 -*-
def init_db():
from cms.db import Base
from .socialtask import SocialTask
from .socialuser import SocialUser
# Issue CREATE queries
Base.metadata.create_all()
# FIXME: The following is here just to avoid a circular dependency in socialuser.py
from cmsocial.db.socialtask import TaskTag
from cmsocial.db.socialuser import SocialUser
from sqlalchemy.orm import relationship
SocialUser.tasktags = relationship("TaskTag")
| # -*- coding: utf-8 -*-
def init_db():
from cms.db import Base
from .socialtask import SocialTask
from .socialuser import SocialUser
from .test import Test
# Issue CREATE queries
Base.metadata.create_all()
# FIXME: The following is here just to avoid a circular dependency in socialuser.py
from cmsocial.db.socialtask import TaskTag
from cmsocial.db.socialuser import SocialUser
from sqlalchemy.orm import relationship
SocialUser.tasktags = relationship("TaskTag")
| Create table "tests" as well | Create table "tests" as well
| Python | agpl-3.0 | elsantodel90/oia-juez,algorithm-ninja/cmsocial,elsantodel90/oia-juez,algorithm-ninja/cmsocial,elsantodel90/oia-juez,algorithm-ninja/cmsocial,algorithm-ninja/cmsocial,algorithm-ninja/cmsocial,elsantodel90/oia-juez | ---
+++
@@ -4,6 +4,7 @@
from cms.db import Base
from .socialtask import SocialTask
from .socialuser import SocialUser
+ from .test import Test
# Issue CREATE queries
Base.metadata.create_all() |
72ed64fad2d03ba97f12d5ad4802bcb956a1f29b | temba/chatbase/tasks.py | temba/chatbase/tasks.py | from __future__ import print_function, unicode_literals
import logging
from celery.task import task
from temba.orgs.models import Org
from .models import Chatbase
logger = logging.getLogger(__name__)
@task(track_started=True, name='send_chatbase_event')
def send_chatbase_event(org, channel, msg, contact):
try:
org = Org.objects.get(id=org)
if org.is_connected_to_chatbase():
chatbase_args = dict(org=org.id,
channel=channel,
msg=msg,
contact=contact)
chatbase = Chatbase.create(**chatbase_args)
chatbase.trigger_chatbase_event()
except Exception as e:
logger.error("Error for chatbase event: %s" % e.args, exc_info=True)
| from __future__ import print_function, unicode_literals
import logging
from celery.task import task
from temba.orgs.models import Org
from .models import Chatbase
logger = logging.getLogger(__name__)
@task(track_started=True, name='send_chatbase_event')
def send_chatbase_event(org, channel, msg, contact):
try:
org = Org.objects.get(id=org)
if org.is_connected_to_chatbase():
chatbase_args = dict(org=org.id, channel=channel, msg=msg, contact=contact)
chatbase = Chatbase.create(**chatbase_args)
chatbase.trigger_chatbase_event()
except Exception as e:
logger.error("Error for chatbase event: %s" % e.args, exc_info=True)
| Change dict declaration to inline | Change dict declaration to inline
| Python | agpl-3.0 | pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro | ---
+++
@@ -15,10 +15,7 @@
try:
org = Org.objects.get(id=org)
if org.is_connected_to_chatbase():
- chatbase_args = dict(org=org.id,
- channel=channel,
- msg=msg,
- contact=contact)
+ chatbase_args = dict(org=org.id, channel=channel, msg=msg, contact=contact)
chatbase = Chatbase.create(**chatbase_args)
chatbase.trigger_chatbase_event()
except Exception as e: |
6a58c7f0eb1b92ec12d0e48d7fd3f2586de20755 | sal/management/commands/update_admin_user.py | sal/management/commands/update_admin_user.py | '''
Creates an admin user if there aren't any existing superusers
'''
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from optparse import make_option
class Command(BaseCommand):
help = 'Creates/Updates an Admin user'
def add_arguments(self, parser):
parser.add_argument('--username',
action='store',
dest='username',
default=None,
help='Admin username')
parser.add_argument('--password',
action='store',
dest='password',
default=None,
help='Admin password')
def handle(self, *args, **options):
username = options.get('username')
password = options.get('password')
if not username or not password:
raise StandardError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
# there aren't any superusers, create one
user, created = User.objects.get_or_create(username=username)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save()
print('{0} updated'.format(username))
else:
print('There are already {0} superusers'.format(su_count))
| """Creates an admin user if there aren't any existing superusers."""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Creates/Updates an Admin user'
def add_arguments(self, parser):
parser.add_argument('--username',
action='store',
dest='username',
default=None,
help='Admin username')
parser.add_argument('--password',
action='store',
dest='password',
default=None,
help='Admin password')
def handle(self, *args, **options):
username = options.get('username')
password = options.get('password')
if not username or not password:
raise CommandError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
# there aren't any superusers, create one
user, created = User.objects.get_or_create(username=username)
user.set_password(password)
user.is_staff = True
user.is_superuser = True
user.save()
print(f'{username} updated')
else:
print(f'There are already {su_count} superusers')
| Fix exception handling in management command. Clean up. | Fix exception handling in management command. Clean up.
| Python | apache-2.0 | salopensource/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,salopensource/sal | ---
+++
@@ -1,10 +1,10 @@
-'''
-Creates an admin user if there aren't any existing superusers
-'''
+"""Creates an admin user if there aren't any existing superusers."""
+
+from optparse import make_option
+
+from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
-from django.contrib.auth.models import User
-from optparse import make_option
class Command(BaseCommand):
@@ -27,7 +27,7 @@
username = options.get('username')
password = options.get('password')
if not username or not password:
- raise StandardError('You must specify a username and password')
+ raise CommandError('You must specify a username and password')
# Get the current superusers
su_count = User.objects.filter(is_superuser=True).count()
if su_count == 0:
@@ -37,6 +37,6 @@
user.is_staff = True
user.is_superuser = True
user.save()
- print('{0} updated'.format(username))
+ print(f'{username} updated')
else:
- print('There are already {0} superusers'.format(su_count))
+ print(f'There are already {su_count} superusers') |
0fc6394e156246367bdee26b03ca94bfebb21545 | examples/django_demo/generic_foreignkey/models.py | examples/django_demo/generic_foreignkey/models.py | from __future__ import unicode_literals
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
class TaggedItem(models.Model):
"""Example GenericForeinKey model from django docs"""
tag = models.SlugField()
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def __str__(self): # __unicode__ on Python 2
return self.tag
| from __future__ import unicode_literals
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
class TaggedItem(models.Model):
"""Example GenericForeignKey model from django docs"""
tag = models.SlugField()
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def __str__(self): # __unicode__ on Python 2
return self.tag
| Fix GenericForeignKey typo in examples | Fix GenericForeignKey typo in examples | Python | mit | FactoryBoy/factory_boy | ---
+++
@@ -6,7 +6,7 @@
class TaggedItem(models.Model):
- """Example GenericForeinKey model from django docs"""
+ """Example GenericForeignKey model from django docs"""
tag = models.SlugField()
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField() |
8a51446ee8833c3472d9f97cc29a58dd42d872b8 | core/tests/test_utils.py | core/tests/test_utils.py | from django.test import TestCase
from core import utils
class SlugifyOC(TestCase):
def test_oc_slugify(self):
lst = (
('test.this.value', 'test-this-value'),
('Plone.OpenComparison', 'plone-opencomparison'),
('Run from here', 'run-from-here'),
('Jump_the shark', 'jump_the-shark'),
)
for l in lst:
self.assertEquals(utils.oc_slugify(l[0]), l[1])
class GetPypiUrl(TestCase):
def test_get_pypi_url_success(self):
lst = (
('django', 'http://pypi.python.org/pypi/django'),
('Django Uni Form', 'http://pypi.python.org/pypi/django-uni-form'),
)
for l in lst:
self.assertEquals(utils.get_pypi_url(l[0]), l[1])
def test_get_pypi_url_fail(self):
lst = (
'ColdFusion is not here',
'php is not here'
)
for l in lst:
self.assertEquals(utils.get_pypi_url(l), None)
| from django.test import TestCase
from core import utils
class SlugifyOC(TestCase):
def test_oc_slugify(self):
lst = (
('test.this.value', 'test-this-value'),
('Plone.OpenComparison', 'plone-opencomparison'),
('Run from here', 'run-from-here'),
('Jump_the shark', 'jump_the-shark'),
)
for l in lst:
self.assertEquals(utils.oc_slugify(l[0]), l[1])
class GetPypiUrl(TestCase):
def test_get_pypi_url_success(self):
lst = (
('django', 'http://pypi.python.org/pypi/django'),
('Django Uni Form', 'http://pypi.python.org/pypi/django-uni-form'),
)
for l in lst:
self.assertEquals(utils.get_pypi_url(l[0].lower()), l[1].lower())
def test_get_pypi_url_fail(self):
lst = (
'ColdFusion is not here',
'php is not here'
)
for l in lst:
self.assertEquals(utils.get_pypi_url(l), None)
| Fix to account for casing diffs between Mac OS X and Linux | Fix to account for casing diffs between Mac OS X and Linux
| Python | mit | QLGu/djangopackages,nanuxbe/djangopackages,nanuxbe/djangopackages,nanuxbe/djangopackages,QLGu/djangopackages,pydanny/djangopackages,pydanny/djangopackages,QLGu/djangopackages,pydanny/djangopackages | ---
+++
@@ -27,7 +27,7 @@
('Django Uni Form', 'http://pypi.python.org/pypi/django-uni-form'),
)
for l in lst:
- self.assertEquals(utils.get_pypi_url(l[0]), l[1])
+ self.assertEquals(utils.get_pypi_url(l[0].lower()), l[1].lower())
def test_get_pypi_url_fail(self):
|
c35957b7219f572e80a550893150e8041c5f14b2 | create_ands_rif_cs_xml.py | create_ands_rif_cs_xml.py | """
Create an ANDS RIF-CS XML file.
Links
-----
- http://ands.org.au/guides/cpguide/cpgrifcs.html
- http://services.ands.org.au/documentation/rifcs/guidelines/rif-cs.html
- http://www.ands.org.au/resource/rif-cs.html
"""
import logging
import os
from settings import (
ANDS_XML_FILE_NAME, ANDS_XML_FOLDER_PATH, ANDS_XML_START, ANDS_XML_STOP
)
logger = logging.getLogger(__name__)
def main():
with open(ANDS_XML_FILE_NAME, 'w') as w:
w.write(ANDS_XML_START)
for file_path in os.listdir(ANDS_XML_FOLDER_PATH):
with open(file_path) as r:
w.write(r.read())
w.write(ANDS_XML_STOP)
if '__main__' == __name__:
logging.basicConfig(level=logging.DEBUG)
main()
| """
Create an ANDS RIF-CS XML file.
Links
-----
- http://ands.org.au/guides/cpguide/cpgrifcs.html
- http://ands.org.au/resource/rif-cs.html
- http://services.ands.org.au/documentation/rifcs/guidelines/rif-cs.html
"""
import logging
import os
from settings import (
ANDS_XML_FILE_NAME, ANDS_XML_FOLDER_PATH, ANDS_XML_START, ANDS_XML_STOP
)
logger = logging.getLogger(__name__)
def main():
with open(ANDS_XML_FILE_NAME, 'w') as w:
w.write(ANDS_XML_START)
for file_path in os.listdir(ANDS_XML_FOLDER_PATH):
with open(file_path) as r:
w.write(r.read())
w.write(ANDS_XML_STOP)
if '__main__' == __name__:
logging.basicConfig(level=logging.DEBUG)
main()
| Fix links in ANDS RIF-CS script | Fix links in ANDS RIF-CS script
| Python | mit | AustralianAntarcticDataCentre/metadata_xml_convert,AustralianAntarcticDataCentre/metadata_xml_convert | ---
+++
@@ -1,14 +1,15 @@
"""
Create an ANDS RIF-CS XML file.
+
Links
-----
- http://ands.org.au/guides/cpguide/cpgrifcs.html
+- http://ands.org.au/resource/rif-cs.html
+
- http://services.ands.org.au/documentation/rifcs/guidelines/rif-cs.html
-
-- http://www.ands.org.au/resource/rif-cs.html
"""
import logging |
4184b9b87a5b2684df47bcf6bb19703ae381ec55 | modules/module_urlsize.py | modules/module_urlsize.py | """Warns about large files"""
def handle_url(bot, user, channel, url):
if channel == "#wow": return
# inform about large files (over 5MB)
size = getUrl(url).getSize()
if not size: return
size = size / 1024
if size > 5:
bot.say(channel, "File size: %s MB" % size)
| """Warns about large files"""
def handle_url(bot, user, channel, url, msg):
if channel == "#wow": return
# inform about large files (over 5MB)
size = getUrl(url).getSize()
if not size: return
size = size / 1024
if size > 5:
bot.say(channel, "File size: %s MB" % size)
| Update to the latest method signature | Update to the latest method signature
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@65 dda364a1-ef19-0410-af65-756c83048fb2
| Python | bsd-3-clause | aapa/pyfibot,huqa/pyfibot,rnyberg/pyfibot,EArmour/pyfibot,nigeljonez/newpyfibot,rnyberg/pyfibot,aapa/pyfibot,lepinkainen/pyfibot,EArmour/pyfibot,huqa/pyfibot,lepinkainen/pyfibot | ---
+++
@@ -1,6 +1,6 @@
"""Warns about large files"""
-def handle_url(bot, user, channel, url):
+def handle_url(bot, user, channel, url, msg):
if channel == "#wow": return
|
d1e9586fbbadd8278d1d4023490df3348915b217 | migrations/versions/0082_set_international.py | migrations/versions/0082_set_international.py | """empty message
Revision ID: 0082_set_international
Revises: 0080_fix_rate_start_date
Create Date: 2017-05-05 15:26:34.621670
"""
# revision identifiers, used by Alembic.
from datetime import datetime
revision = '0082_set_international'
down_revision = '0080_fix_rate_start_date'
from alembic import op
import sqlalchemy as sa
def upgrade():
conn = op.get_bind()
start = datetime.utcnow()
all_notifications = "select id from notification_history where international is null limit 10000"
results = conn.execute(all_notifications)
res = results.fetchall()
conn.execute("update notifications set international = False where id in ({})".format(all_notifications))
conn.execute("update notification_history set international = False where id in ({})".format(all_notifications))
while len(res) > 0:
conn.execute("update notifications set international = False where id in ({})".format(all_notifications))
conn.execute("update notification_history set international = False where id in ({})".format(all_notifications))
results = conn.execute(all_notifications)
res = results.fetchall()
end = datetime.utcnow()
print("Started at: {} ended at: {}".format(start, end))
def downgrade():
# There is no way to downgrade this update.
pass | """empty message
Revision ID: 0082_set_international
Revises: 0081_noti_status_as_enum
Create Date: 2017-05-05 15:26:34.621670
"""
from datetime import datetime
from alembic import op
# revision identifiers, used by Alembic.
revision = '0082_set_international'
down_revision = '0081_noti_status_as_enum'
def upgrade():
conn = op.get_bind()
start = datetime.utcnow()
notification_history = "select id from notification_history where international is null limit 10000"
results = conn.execute(notification_history)
res = results.fetchall()
while len(res) > 0:
conn.execute("update notification_history set international = False where id in ({})".format(
notification_history))
results = conn.execute(notification_history)
res = results.fetchall()
notifications = "select id from notifications where international is null limit 10000"
results2 = conn.execute(notifications)
res2 = results2.fetchall()
while len(res2) > 0:
conn.execute("update notifications set international = False where id in ({})".format(notifications))
results2 = conn.execute(notifications)
res2 = results2.fetchall()
end = datetime.utcnow()
print("Started at: {} ended at: {}".format(start, end))
def downgrade():
# There is no way to downgrade this update.
pass
| Update the script to set the international flag to do the notifications and notification_history in separate loops. It takes about 1.5 minutes to update 27,000 notifications and 27,000 notification_history. The update is a row level lock so will only affect updates to the same row. This is unlikely as the data being updated should be older than 3 days. The second scripts updates the table to set international as not null, to make the model. | Update the script to set the international flag to do the notifications and notification_history in separate loops.
It takes about 1.5 minutes to update 27,000 notifications and 27,000 notification_history. The update is a row level lock so will only affect updates to the same row.
This is unlikely as the data being updated should be older than 3 days.
The second scripts updates the table to set international as not null, to make the model.
| Python | mit | alphagov/notifications-api,alphagov/notifications-api | ---
+++
@@ -1,39 +1,44 @@
"""empty message
Revision ID: 0082_set_international
-Revises: 0080_fix_rate_start_date
+Revises: 0081_noti_status_as_enum
Create Date: 2017-05-05 15:26:34.621670
"""
+from datetime import datetime
+from alembic import op
# revision identifiers, used by Alembic.
-from datetime import datetime
-
revision = '0082_set_international'
-down_revision = '0080_fix_rate_start_date'
-
-from alembic import op
-import sqlalchemy as sa
+down_revision = '0081_noti_status_as_enum'
def upgrade():
conn = op.get_bind()
start = datetime.utcnow()
- all_notifications = "select id from notification_history where international is null limit 10000"
+ notification_history = "select id from notification_history where international is null limit 10000"
- results = conn.execute(all_notifications)
+ results = conn.execute(notification_history)
res = results.fetchall()
- conn.execute("update notifications set international = False where id in ({})".format(all_notifications))
- conn.execute("update notification_history set international = False where id in ({})".format(all_notifications))
+ while len(res) > 0:
+ conn.execute("update notification_history set international = False where id in ({})".format(
+ notification_history))
+ results = conn.execute(notification_history)
+ res = results.fetchall()
- while len(res) > 0:
- conn.execute("update notifications set international = False where id in ({})".format(all_notifications))
- conn.execute("update notification_history set international = False where id in ({})".format(all_notifications))
- results = conn.execute(all_notifications)
- res = results.fetchall()
+ notifications = "select id from notifications where international is null limit 10000"
+ results2 = conn.execute(notifications)
+ res2 = results2.fetchall()
+ while len(res2) > 0:
+ conn.execute("update notifications set international = False where id in ({})".format(notifications))
+
+ results2 = conn.execute(notifications)
+ res2 = results2.fetchall()
+
end = datetime.utcnow()
print("Started at: {} ended at: {}".format(start, end))
+
def downgrade():
# There is no way to downgrade this update. |
b6eb5e5ed4c12bea6239a58f76b7c944258c32b5 | paypal/payflow/codes.py | paypal/payflow/codes.py | # Make strings collectable with gettext tools, but don't trnslate them here:
_ = lambda x: x
# Transaction types (TRXTYPE)...
SALE, CREDIT, AUTHORIZATION, DELAYED_CAPTURE, VOID, DUPLICATE_TRANSACTION = (
'S', 'C', 'A', 'D', 'V', 'N')
# ...for humans
trxtype_map = {
SALE: _('Sale'),
AUTHORIZATION: _('Authorize'),
CREDIT: _('Credit'),
DELAYED_CAPTURE: _('Delayed capture'),
VOID: _('Void'),
DUPLICATE_TRANSACTION: _('Duplicate transaction'),
}
# Payment methods (TENDER)
BANKCARD, PAYPAL = 'C', 'P'
tender_map = {
BANKCARD: 'Bankcard',
PAYPAL: 'PayPal'
}
| # Make strings collectable with gettext tools, but don't translate them here:
_ = lambda x: x
# Transaction types (TRXTYPE)...
SALE, CREDIT, AUTHORIZATION, DELAYED_CAPTURE, VOID, DUPLICATE_TRANSACTION = (
'S', 'C', 'A', 'D', 'V', 'N')
# ...for humans
trxtype_map = {
SALE: _('Sale'),
AUTHORIZATION: _('Authorize'),
CREDIT: _('Credit'),
DELAYED_CAPTURE: _('Delayed capture'),
VOID: _('Void'),
DUPLICATE_TRANSACTION: _('Duplicate transaction'),
}
# Payment methods (TENDER)
BANKCARD, PAYPAL = 'C', 'P'
tender_map = {
BANKCARD: 'Bankcard',
PAYPAL: 'PayPal'
}
| Fix simple typo, trnslate -> translate | docs: Fix simple typo, trnslate -> translate
There is a small typo in paypal/payflow/codes.py.
Should read `translate` rather than `trnslate`.
| Python | bsd-3-clause | django-oscar/django-oscar-paypal,evonove/django-oscar-paypal,lpakula/django-oscar-paypal,lpakula/django-oscar-paypal,st8st8/django-oscar-paypal,evonove/django-oscar-paypal,lpakula/django-oscar-paypal,django-oscar/django-oscar-paypal,evonove/django-oscar-paypal,django-oscar/django-oscar-paypal,st8st8/django-oscar-paypal,st8st8/django-oscar-paypal | ---
+++
@@ -1,4 +1,4 @@
-# Make strings collectable with gettext tools, but don't trnslate them here:
+# Make strings collectable with gettext tools, but don't translate them here:
_ = lambda x: x
# Transaction types (TRXTYPE)... |
a121b79cd9260f17e85f3a611a47bb913170b353 | scripts/poweron/DRAC.py | scripts/poweron/DRAC.py | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/usr/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | Change path to the supplemental pack | CA-40618: Change path to the supplemental pack
Signed-off-by: Javier Alvarez-Valle <cf4c8668a0b4c5e013f594a6940d05b3d4d9ddcf@citrix.com>
| Python | lgpl-2.1 | djs55/xcp-networkd,sharady/xcp-networkd,johnelse/xcp-rrdd,sharady/xcp-networkd,robhoes/squeezed,djs55/xcp-rrdd,simonjbeaumont/xcp-rrdd,koushikcgit/xcp-networkd,koushikcgit/xcp-networkd,koushikcgit/xcp-rrdd,djs55/xcp-networkd,simonjbeaumont/xcp-rrdd,johnelse/xcp-rrdd,koushikcgit/xcp-rrdd,djs55/xcp-rrdd,djs55/squeezed,koushikcgit/xcp-rrdd | ---
+++
@@ -18,7 +18,7 @@
return run.returncode, out, err
-drac_path='/usr/sbin/racadm'
+drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK() |
8e51085d9843b6b78f601ac28a2d01d2fc20cb09 | tests/test_settings.py | tests/test_settings.py | import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
SECRET_KEY = 'fake-key'
HASHID_FIELD_SALT = 'gg ez'
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.messages",
"django.contrib.sessions",
"django.contrib.admin",
"tests",
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
MIDDLEWARE = [
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
| import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
SECRET_KEY = 'fake-key'
HASHID_FIELD_SALT = 'gg ez'
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.messages",
"django.contrib.sessions",
"django.contrib.admin",
"tests",
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
| Fix tests for django 2.2 | Fix tests for django 2.2
| Python | mit | nshafer/django-hashid-field,nshafer/django-hashid-field | ---
+++
@@ -20,6 +20,7 @@
}
MIDDLEWARE = [
+ 'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
] |
b414d74a639151785ef02a9d390e1398b7167886 | app/forms/simple_form.py | app/forms/simple_form.py | from flask_wtf import Form
from wtforms import StringField, SubmitField
from wtforms.validators import Required
class SimpleForm(Form):
name = StringField('Your name', validators=[Required()])
submit = SubmitField('Submit')
| from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import Required
class SimpleForm(FlaskForm):
name = StringField('Your name', validators=[Required()])
submit = SubmitField('Submit')
| Change Form to FlaskForm (prevent deprecated) | Change Form to FlaskForm (prevent deprecated)
| Python | mit | rustyworks/flask-structure,rustyworks/flask-structure | ---
+++
@@ -1,8 +1,8 @@
-from flask_wtf import Form
+from flask_wtf import FlaskForm
from wtforms import StringField, SubmitField
from wtforms.validators import Required
-class SimpleForm(Form):
+class SimpleForm(FlaskForm):
name = StringField('Your name', validators=[Required()])
submit = SubmitField('Submit') |
4e8a9b2520642e3f2204ee3da59a153b61a95160 | polyaxon_client/transport/socket_transport.py | polyaxon_client/transport/socket_transport.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import websocket
from polyaxon_client.logger import logger
class SocketTransportMixin(object):
"""Socket operations transport."""
def socket(self, url, message_handler, headers=None):
webs = websocket.WebSocketApp(
url,
on_message=lambda ws, message: self._on_message(message_handler, message),
on_error=self._on_error,
on_close=self._on_close,
header=self._get_headers(headers)
)
return webs
def stream(self, url, message_handler, headers=None):
webs = self.socket(url=url, message_handler=message_handler, headers=headers)
webs.run_forever(ping_interval=30, ping_timeout=10)
def _on_message(self, message_handler, message):
if message_handler and message:
message_handler(json.loads(message))
@staticmethod
def _on_error(ws, error):
if isinstance(error, (KeyboardInterrupt, SystemExit)):
logger.info('Quitting... The session will be running in the background.')
else:
logger.debug('Termination cause: %s', error)
logger.debug('Session disconnected.')
@staticmethod
def _on_close(ws):
logger.info('Session ended')
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import json
import threading
import websocket
from polyaxon_client.logger import logger
from polyaxon_client.workers.socket_worker import SocketWorker
class SocketTransportMixin(object):
"""Socket operations transport."""
def socket(self, url, message_handler, headers=None):
webs = websocket.WebSocketApp(
url,
on_message=lambda ws, message: self._on_message(message_handler, message),
on_error=self._on_error,
on_close=self._on_close,
header=self._get_headers(headers)
)
return webs
def stream(self, url, message_handler, headers=None):
webs = self.socket(url=url, message_handler=message_handler, headers=headers)
webs.run_forever(ping_interval=30, ping_timeout=10)
def _on_message(self, message_handler, message):
if message_handler and message:
message_handler(json.loads(message).decode('utf-8'))
@staticmethod
def _on_error(ws, error):
if isinstance(error, (KeyboardInterrupt, SystemExit)):
logger.info('Quitting... The session will be running in the background.')
else:
logger.debug('Termination cause: %s', error)
logger.debug('Session disconnected.')
@staticmethod
def _on_close(ws):
logger.info('Session ended')
| Add utf decode for reading data | Add utf decode for reading data
| Python | apache-2.0 | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | ---
+++
@@ -2,9 +2,12 @@
from __future__ import absolute_import, division, print_function
import json
+import threading
+
import websocket
from polyaxon_client.logger import logger
+from polyaxon_client.workers.socket_worker import SocketWorker
class SocketTransportMixin(object):
@@ -25,7 +28,7 @@
def _on_message(self, message_handler, message):
if message_handler and message:
- message_handler(json.loads(message))
+ message_handler(json.loads(message).decode('utf-8'))
@staticmethod
def _on_error(ws, error): |
029edcfe1769dd65fa2fac566abb5686c5986890 | backdrop/core/records.py | backdrop/core/records.py | import datetime
class Record(object):
def __init__(self, data):
self.data = data
self.meta = {}
if "_timestamp" in self.data:
days_since_week_start = datetime.timedelta(
days=self.data['_timestamp'].weekday())
week_start = self.data['_timestamp'] - days_since_week_start
self.meta['_week_start_at'] = week_start.replace(
hour=0, minute=0, second=0, microsecond=0)
def to_mongo(self):
return dict(
self.data.items() + self.meta.items()
)
def __eq__(self, other):
if not isinstance(other, Record):
return False
if self.data != other.data:
return False
if self.meta != other.meta:
return False
return True
| import datetime
class Record(object):
def __init__(self, data):
self.data = data
self.meta = {}
if "_timestamp" in self.data:
day_of_week = self.data['_timestamp'].weekday()
delta_from_week_start = datetime.timedelta(days=day_of_week)
week_start = self.data['_timestamp'] - delta_from_week_start
self.meta['_week_start_at'] = week_start.replace(
hour=0,
minute=0,
second=0,
microsecond=0
)
def to_mongo(self):
return dict(
self.data.items() + self.meta.items()
)
def __eq__(self, other):
if not isinstance(other, Record):
return False
if self.data != other.data:
return False
if self.meta != other.meta:
return False
return True
| Refactor for clarity around _week_start_at | Refactor for clarity around _week_start_at
| Python | mit | alphagov/backdrop,alphagov/backdrop,alphagov/backdrop | ---
+++
@@ -6,12 +6,17 @@
def __init__(self, data):
self.data = data
self.meta = {}
+
if "_timestamp" in self.data:
- days_since_week_start = datetime.timedelta(
- days=self.data['_timestamp'].weekday())
- week_start = self.data['_timestamp'] - days_since_week_start
+ day_of_week = self.data['_timestamp'].weekday()
+ delta_from_week_start = datetime.timedelta(days=day_of_week)
+ week_start = self.data['_timestamp'] - delta_from_week_start
self.meta['_week_start_at'] = week_start.replace(
- hour=0, minute=0, second=0, microsecond=0)
+ hour=0,
+ minute=0,
+ second=0,
+ microsecond=0
+ )
def to_mongo(self):
return dict( |
e4841c674545892dfc6a8390574cec7c2836e004 | main.py | main.py | from SimpleCV import *
winsize = (640,480)
display = Display(winsize)
video = VirtualCamera('stefan_eye.mp4', 'video')
while display.isNotDone():
a = video.getImage()
a.rotate(90).invert().toGray().binarize().save(display)
| from SimpleCV import *
winsize = (640,480)
display = Display(winsize)
video = VirtualCamera('stefan_eye.mp4', 'video')
while display.isNotDone():
image = video.getImage().rotate(90).crop(850,50,400,400)
image2 = image.colorDistance(Color.RED)
blobs = image2.findBlobs()
image3 = image2.grayscale()
if blobs:
for b in blobs:
if b.isCircle(0.7) and b.radius() > 3:
image.drawCircle((b.x,b.y),b.radius(),Color.YELLOW,2)
image.show()
| Add code to accomodate a new '3 circles' approach | Add code to accomodate a new '3 circles' approach
| Python | mit | ColdSauce/Iris | ---
+++
@@ -1,9 +1,14 @@
from SimpleCV import *
winsize = (640,480)
display = Display(winsize)
-
video = VirtualCamera('stefan_eye.mp4', 'video')
while display.isNotDone():
- a = video.getImage()
- a.rotate(90).invert().toGray().binarize().save(display)
-
+ image = video.getImage().rotate(90).crop(850,50,400,400)
+ image2 = image.colorDistance(Color.RED)
+ blobs = image2.findBlobs()
+ image3 = image2.grayscale()
+ if blobs:
+ for b in blobs:
+ if b.isCircle(0.7) and b.radius() > 3:
+ image.drawCircle((b.x,b.y),b.radius(),Color.YELLOW,2)
+ image.show() |
e120c264f16f89b197ff3416deaefb7f553611db | pages/urlconf_registry.py | pages/urlconf_registry.py | """Django page CMS urlconf registry."""
from django.utils.translation import ugettext as _
class UrlconfAlreadyRegistered(Exception):
"""
An attempt was made to register a widget for Django page CMS more
than once.
"""
class UrlconfNotFound(Exception):
"""
The requested widget was not found
"""
registry = []
def get_choices():
choices = [('', 'No delegation')]
for reg in registry:
if reg[2]:
label = reg[2]
else:
label = reg[0]
choices.append((reg[0], label))
return choices
def register_urlconf(name, urlconf, label=None):
for urlconf_tuple in registry:
if urlconf_tuple[0] == name:
raise UrlconfAlreadyRegistered(
_('The urlconf %s has already been registered.') % name)
urlconf_tuple = (name, urlconf, label, urlconf)
registry.append(urlconf_tuple)
def get_urlconf(name):
for urlconf_tuple in registry:
if urlconf_tuple[0] == name:
return urlconf_tuple[1]
raise UrlconfNotFound(
_('The urlconf %s has not been registered.') % name) | """Django page CMS urlconf registry."""
from django.utils.translation import ugettext as _
class UrlconfAlreadyRegistered(Exception):
"""
An attempt was made to register a urlconf for Django page CMS more
than once.
"""
class UrlconfNotFound(Exception):
"""
The requested urlconf was not found
"""
registry = []
def get_choices():
choices = [('', 'No delegation')]
for reg in registry:
if reg[2]:
label = reg[2]
else:
label = reg[0]
choices.append((reg[0], label))
return choices
def register_urlconf(name, urlconf, label=None):
for urlconf_tuple in registry:
if urlconf_tuple[0] == name:
raise UrlconfAlreadyRegistered(
_('The urlconf %s has already been registered.') % name)
urlconf_tuple = (name, urlconf, label, urlconf)
registry.append(urlconf_tuple)
def get_urlconf(name):
for urlconf_tuple in registry:
if urlconf_tuple[0] == name:
return urlconf_tuple[1]
raise UrlconfNotFound(
_('The urlconf %s has not been registered.') % name) | Fix typos in urlconf registry | Fix typos in urlconf registry
| Python | bsd-3-clause | batiste/django-page-cms,remik/django-page-cms,oliciv/django-page-cms,oliciv/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,oliciv/django-page-cms,pombredanne/django-page-cms-1,remik/django-page-cms,akaihola/django-page-cms,batiste/django-page-cms,batiste/django-page-cms,akaihola/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,akaihola/django-page-cms | ---
+++
@@ -4,13 +4,13 @@
class UrlconfAlreadyRegistered(Exception):
"""
- An attempt was made to register a widget for Django page CMS more
+ An attempt was made to register a urlconf for Django page CMS more
than once.
"""
class UrlconfNotFound(Exception):
"""
- The requested widget was not found
+ The requested urlconf was not found
"""
registry = [] |
ed6a69dc2efefdb8cf5e32c9c71b122b6357b1fa | parks/test/test_finder.py | parks/test/test_finder.py | #!/usr/bin/env python
"""Unit tests for the 'finder' module."""
| #!/usr/bin/env python
"""Unit tests for the 'finder' module."""
def test_tbd():
"""Placeholder for the first test."""
assert True
| Add a placeholder test so pytest will not report an error | Add a placeholder test so pytest will not report an error
| Python | mit | friendlycode/gr-parks,friendlycode/gr-parks,friendlycode/gr-parks,friendlycode/gr-parks | ---
+++
@@ -1,3 +1,8 @@
#!/usr/bin/env python
"""Unit tests for the 'finder' module."""
+
+
+def test_tbd():
+ """Placeholder for the first test."""
+ assert True |
d7c6a7f78c8620e0e01e57eb082860e90f782a30 | parsl/tests/test_swift.py | parsl/tests/test_swift.py | #!/usr/bin/env python3.5
import parsl
from parsl import *
parsl.set_stream_logger()
from parsl.executors.swift_t import *
def foo(x, y):
return x * y
def slow_foo(x, y):
import time
time.sleep(x)
return x * y
def bad_foo(x, y):
time.sleep(x)
return x * y
def test_simple():
print("Start")
tex = TurbineExecutor()
x = tex.submit(foo, 5, 10)
print("Got : ", x)
print("X result : ", x.result())
assert x.result() == 50, "X != 50"
print("done")
def test_except():
print("Start")
tex = TurbineExecutor()
x = tex.submit(bad_foo, 5, 10)
print("Got : ", x)
print("X exception : ", x.exception())
print("X result : ", x.result())
print("done")
if __name__ == "__main__":
# test_simple()
test_except()
exit(0)
futs = {}
for i in range(0, 1):
futs[i] = tex.submit(slow_foo, 3, 10)
x.result(timeout=10)
for x in range(0, 10):
print(futs)
time.sleep(4)
print("Done")
| #!/usr/bin/env python3.5
from nose.tools import assert_raises
import parsl
from parsl import *
parsl.set_stream_logger()
from parsl.executors.swift_t import *
def foo(x, y):
return x * y
def slow_foo(x, y):
import time
time.sleep(x)
return x * y
def bad_foo(x, y):
time.sleep(x)
return x * y
def test_simple():
print("Start")
tex = TurbineExecutor()
x = tex.submit(foo, 5, 10)
print("Got: ", x)
print("X result: ", x.result())
assert x.result() == 50, "X != 50"
print("done")
def test_slow():
futs = {}
tex = TurbineExecutor()
for i in range(0, 3):
futs[i] = tex.submit(slow_foo, 1, 2)
total = sum([futs[i].result(timeout=10) for i in futs])
assert total == 6, "expected 6, got {}".format(total)
def test_except():
def get_bad_result():
tex = TurbineExecutor()
x = tex.submit(bad_foo, 5, 10)
return x.result()
assert_raises(NameError, get_bad_result)
if __name__ == "__main__":
# test_simple()
# test_slow()
test_except()
print("Done")
| Make `test_except` swift test pass | Make `test_except` swift test pass
Currently it is expected to fail. This asserts that the correct
exception is raised. Fixes #155.
| Python | apache-2.0 | Parsl/parsl,swift-lang/swift-e-lab,Parsl/parsl,Parsl/parsl,swift-lang/swift-e-lab,Parsl/parsl | ---
+++
@@ -1,4 +1,5 @@
#!/usr/bin/env python3.5
+from nose.tools import assert_raises
import parsl
from parsl import *
@@ -17,46 +18,42 @@
time.sleep(x)
return x * y
-
def bad_foo(x, y):
time.sleep(x)
return x * y
-
def test_simple():
print("Start")
tex = TurbineExecutor()
x = tex.submit(foo, 5, 10)
- print("Got : ", x)
- print("X result : ", x.result())
+ print("Got: ", x)
+ print("X result: ", x.result())
assert x.result() == 50, "X != 50"
print("done")
+def test_slow():
+ futs = {}
+ tex = TurbineExecutor()
+ for i in range(0, 3):
+ futs[i] = tex.submit(slow_foo, 1, 2)
+
+ total = sum([futs[i].result(timeout=10) for i in futs])
+ assert total == 6, "expected 6, got {}".format(total)
def test_except():
- print("Start")
- tex = TurbineExecutor()
- x = tex.submit(bad_foo, 5, 10)
- print("Got : ", x)
+ def get_bad_result():
+ tex = TurbineExecutor()
+ x = tex.submit(bad_foo, 5, 10)
- print("X exception : ", x.exception())
- print("X result : ", x.result())
+ return x.result()
- print("done")
+ assert_raises(NameError, get_bad_result)
if __name__ == "__main__":
# test_simple()
+ # test_slow()
test_except()
- exit(0)
- futs = {}
- for i in range(0, 1):
- futs[i] = tex.submit(slow_foo, 3, 10)
-
- x.result(timeout=10)
- for x in range(0, 10):
- print(futs)
- time.sleep(4)
print("Done") |
9d1a5932ad25b075b095f170c1b374e46b6f740b | setup/create_players.py | setup/create_players.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import json
from db import commit_db_item
from db.player import Player
def migrate_players(plr_src_file=None):
if not plr_src_file:
plr_src_file = os.path.join(
os.path.dirname(__file__), 'nhl_players.json')
migration_data = json.load(open(plr_src_file))
for player_id in sorted(migration_data.keys())[:]:
last_name = migration_data[player_id]['last_name']
first_name = migration_data[player_id]['first_name']
position = migration_data[player_id]['position']
alternate_last_names = migration_data[player_id].get(
'alternate_last_names', None)
alternate_first_names = migration_data[player_id].get(
'alternate_first_names', None)
alternate_positions = migration_data[player_id].get(
'alternate_positions', None)
plr = Player(
player_id, last_name, first_name, position,
alternate_last_names=alternate_last_names,
alternate_first_names=alternate_first_names,
alternate_positions=alternate_positions
)
print("Working on %s" % plr)
commit_db_item(plr)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import json
from db import commit_db_item
from db.player import Player
from db.team import Team
from utils.player_finder import PlayerFinder
def migrate_players(plr_src_file=None):
if not plr_src_file:
plr_src_file = os.path.join(
os.path.dirname(__file__), 'nhl_players.json')
migration_data = json.load(open(plr_src_file))
for player_id in sorted(migration_data.keys())[:]:
last_name = migration_data[player_id]['last_name']
first_name = migration_data[player_id]['first_name']
position = migration_data[player_id]['position']
alternate_last_names = migration_data[player_id].get(
'alternate_last_names', None)
alternate_first_names = migration_data[player_id].get(
'alternate_first_names', None)
alternate_positions = migration_data[player_id].get(
'alternate_positions', None)
plr = Player(
player_id, last_name, first_name, position,
alternate_last_names=alternate_last_names,
alternate_first_names=alternate_first_names,
alternate_positions=alternate_positions
)
print("Working on %s" % plr)
commit_db_item(plr)
def search_players(src_type):
plr_f = PlayerFinder()
current_teams = Team.find_teams_for_season()
for team in sorted(current_teams)[:]:
team_players = plr_f.find_players_for_team(team, src_type)
print(sorted(team_players))
| Add stub to search for players remotely | Add stub to search for players remotely
| Python | mit | leaffan/pynhldb | ---
+++
@@ -6,6 +6,8 @@
from db import commit_db_item
from db.player import Player
+from db.team import Team
+from utils.player_finder import PlayerFinder
def migrate_players(plr_src_file=None):
@@ -39,3 +41,14 @@
print("Working on %s" % plr)
commit_db_item(plr)
+
+
+def search_players(src_type):
+
+ plr_f = PlayerFinder()
+
+ current_teams = Team.find_teams_for_season()
+ for team in sorted(current_teams)[:]:
+ team_players = plr_f.find_players_for_team(team, src_type)
+ print(sorted(team_players))
+ |
c1f221b638405af81c637ddd79bd8c9eef24b488 | main.py | main.py | import hashlib
import models
import os
import os.path
def init():
models.db.connect()
models.db.create_tables([models.Entry])
def digest(file_path):
h = hashlib.sha1()
file = open(file_path, 'rb')
buf = file.read(8192)
while len(buf) > 0:
h.update(buf)
buf = file.read(8192)
return h.hexdigest()
def traverse(path):
path = os.path.abspath(path)
for (dir_path, dirs, files) in os.walk(path):
for file_name in files:
file_path = os.path.join(dir_path, file_name)
entry = models.Entry(path=file_path)
entry.size = os.path.getsize(file_path)
entry.last_modified = os.path.getmtime(file_path)
entry.hash_str = digest(file_path)
entry.save()
| import hashlib
import models
import os
import os.path
def init():
models.db.connect()
models.db.create_tables([models.Entry])
def digest(file_path):
h = hashlib.sha1()
file = open(file_path, 'rb')
buf = file.read(8192)
while len(buf) > 0:
h.update(buf)
buf = file.read(8192)
return h.hexdigest()
def traverse(path):
path = os.path.abspath(path)
for (dir_path, dirs, files) in os.walk(path):
buf = []
for file_name in sorted(files):
file_path = os.path.join(dir_path, file_name)
entry = {
'path': file_path,
'size': os.path.getsize(file_path),
'last_modified': os.path.getmtime(file_path),
'hash_str': digest(file_path)
}
buf.append(entry)
if len(buf) >= 256:
print('Writing chunks until', file_name)
models.Entry.insert_many(buf).execute()
buf.clear()
| Apply modification from Feb 5 | Apply modification from Feb 5
| Python | mit | rschiang/pineapple.py | ---
+++
@@ -20,11 +20,18 @@
def traverse(path):
path = os.path.abspath(path)
for (dir_path, dirs, files) in os.walk(path):
- for file_name in files:
+ buf = []
+ for file_name in sorted(files):
file_path = os.path.join(dir_path, file_name)
+ entry = {
+ 'path': file_path,
+ 'size': os.path.getsize(file_path),
+ 'last_modified': os.path.getmtime(file_path),
+ 'hash_str': digest(file_path)
+ }
- entry = models.Entry(path=file_path)
- entry.size = os.path.getsize(file_path)
- entry.last_modified = os.path.getmtime(file_path)
- entry.hash_str = digest(file_path)
- entry.save()
+ buf.append(entry)
+ if len(buf) >= 256:
+ print('Writing chunks until', file_name)
+ models.Entry.insert_many(buf).execute()
+ buf.clear() |
fc263902e845c21aa3379bf985cef693d30bc56b | senlin/tests/functional/test_policy_type.py | senlin/tests/functional/test_policy_type.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.tests.functional import api as test_api
from senlin.tests.functional import base
class TestPolicyType(base.SenlinFunctionalTest):
def test_get_policy_types(self):
# Check that listing policy types works.
policy_types = test_api.list_policy_types(self.client)
policy_names = [p['name'] for p in policy_types]
self.assertIn('DeletionPolicy', policy_names)
self.assertIn('ScalingInPolicy', policy_names)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.tests.functional import api as test_api
from senlin.tests.functional import base
class TestPolicyType(base.SenlinFunctionalTest):
def test_get_policy_types(self):
# Check that listing policy types works.
policy_types = test_api.list_policy_types(self.client)
policy_names = [p['name'] for p in policy_types]
self.assertIn('senlin.policy.deletion', policy_names)
self.assertIn('senlin.policy.scaling', policy_names)
| Fix functional test for policy type listing | Fix functional test for policy type listing
This patch fixes the funtional test for policy type listing. We have
changed the names of builtin policy types.
Change-Id: I9f04ab2a4245e8946db3a0255658676cc5f600ab
| Python | apache-2.0 | stackforge/senlin,openstack/senlin,stackforge/senlin,Alzon/senlin,tengqm/senlin-container,tengqm/senlin-container,openstack/senlin,Alzon/senlin,openstack/senlin | ---
+++
@@ -20,5 +20,5 @@
# Check that listing policy types works.
policy_types = test_api.list_policy_types(self.client)
policy_names = [p['name'] for p in policy_types]
- self.assertIn('DeletionPolicy', policy_names)
- self.assertIn('ScalingInPolicy', policy_names)
+ self.assertIn('senlin.policy.deletion', policy_names)
+ self.assertIn('senlin.policy.scaling', policy_names) |
2faf0facda08df07fbe9ed5363a3546d726326f6 | docs/conf.py | docs/conf.py | #!/usr/bin/env python3
from importlib.metadata import version
from packaging.version import parse
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.extlinks",
"sphinx_autodoc_typehints",
"sphinxcontrib.asyncio",
"sphinx_tabs.tabs",
]
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = "asphalt-web"
author = "Alex Grönholm"
copyright = "2022, " + author
v = parse(version(project))
version = v.base_version
release = v.public
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
highlight_language = "python3"
todo_include_todos = False
autodoc_inherit_docstrings = False
html_theme = "sphinx_rtd_theme"
html_static_path = ["_static"]
htmlhelp_basename = project.replace("-", "") + "doc"
extlinks = {
"github": (
f"https://github.com/asphalt-framework/{project}/tree/{version}/%s",
None,
)
}
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"asphalt": ("https://asphalt.readthedocs.io/en/latest/", None),
}
| #!/usr/bin/env python3
from importlib.metadata import version
from packaging.version import parse
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.extlinks",
"sphinx_autodoc_typehints",
"sphinxcontrib.asyncio",
"sphinx_tabs.tabs",
]
templates_path = ["_templates"]
source_suffix = ".rst"
master_doc = "index"
project = "asphalt-web"
author = "Alex Grönholm"
copyright = "2022, " + author
v = parse(version(project))
version = v.base_version
release = v.public
language = None
exclude_patterns = ["_build"]
pygments_style = "sphinx"
highlight_language = "python3"
todo_include_todos = False
autodoc_inherit_docstrings = False
html_theme = "sphinx_rtd_theme"
html_static_path = ["_static"]
htmlhelp_basename = project.replace("-", "") + "doc"
extlinks = {
"github": (
f"https://github.com/asphalt-framework/{project}/tree/{release}/%s",
None,
)
}
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"asphalt": ("https://asphalt.readthedocs.io/en/latest/", None),
}
| Use the release version rather than public version for GitHub links | Use the release version rather than public version for GitHub links
| Python | apache-2.0 | asphalt-framework/asphalt-web | ---
+++
@@ -37,7 +37,7 @@
extlinks = {
"github": (
- f"https://github.com/asphalt-framework/{project}/tree/{version}/%s",
+ f"https://github.com/asphalt-framework/{project}/tree/{release}/%s",
None,
)
} |
ae0e2a481f91e94cf05ac2df63f1d66f76a5e442 | indra/preassembler/grounding_mapper/gilda.py | indra/preassembler/grounding_mapper/gilda.py | """This module implements a client to the Gilda grounding web service,
and contains functions to help apply it during the course of INDRA assembly."""
import requests
from .mapper import GroundingMapper
grounding_service_url = 'http://grounding.indra.bio/ground'
def ground_statements(stmts):
"""Set grounding for Agents in a list of Statements using Gilda.
This function modifies the original Statements/Agents in place.
Parameters
----------
stmts : list[indra.statements.Statements]
A list of Statements to ground
"""
for stmt in stmts:
if stmt.evidence and stmt.evidence[0].text:
context = stmt.evidence[0].text
else:
context = None
for agent in stmt.agent_list():
if agent is not None and 'TEXT' in agent.db_refs:
txt = agent.db_refs['TEXT']
resp = requests.post(grounding_service_url,
json={'text': txt,
'context': context})
results = resp.json()
if results:
db_refs = {'TEXT': txt,
results[0]['term']['db']:
results[0]['term']['id']}
agent.db_refs = db_refs
GroundingMapper.standardize_agent_name(agent,
standardize_refs=True)
| """This module implements a client to the Gilda grounding web service,
and contains functions to help apply it during the course of INDRA assembly."""
import requests
from .mapper import GroundingMapper
grounding_service_url = 'http://grounding.indra.bio'
def get_gilda_models():
"""Return a list of strings for which Gilda has a disambiguation model.
Returns
-------
list[str]
A list of entity strings.
"""
res = requests.post(grounding_service_url + '/models')
models = res.json()
return models
def ground_statement(stmt):
"""Set grounding for Agents in a given Statement using Gilda.
This function modifies the original Statement/Agents in place.
Parameters
----------
stmt : indra.statements.Statement
A Statement to ground
"""
if stmt.evidence and stmt.evidence[0].text:
context = stmt.evidence[0].text
else:
context = None
for agent in stmt.agent_list():
if agent is not None and 'TEXT' in agent.db_refs:
txt = agent.db_refs['TEXT']
resp = requests.post(grounding_service_url + '/ground',
json={'text': txt,
'context': context})
results = resp.json()
if results:
db_refs = {'TEXT': txt,
results[0]['term']['db']:
results[0]['term']['id']}
agent.db_refs = db_refs
GroundingMapper.standardize_agent_name(agent,
standardize_refs=True)
def ground_statements(stmts):
"""Set grounding for Agents in a list of Statements using Gilda.
This function modifies the original Statements/Agents in place.
Parameters
----------
stmts : list[indra.statements.Statement]
A list of Statements to ground
"""
for stmt in stmts:
ground_statement(stmt)
| Refactor Gilda module and add function to get models | Refactor Gilda module and add function to get models
| Python | bsd-2-clause | bgyori/indra,johnbachman/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,bgyori/indra,johnbachman/indra,sorgerlab/indra,johnbachman/indra | ---
+++
@@ -4,7 +4,50 @@
import requests
from .mapper import GroundingMapper
-grounding_service_url = 'http://grounding.indra.bio/ground'
+grounding_service_url = 'http://grounding.indra.bio'
+
+
+def get_gilda_models():
+ """Return a list of strings for which Gilda has a disambiguation model.
+
+ Returns
+ -------
+ list[str]
+ A list of entity strings.
+ """
+ res = requests.post(grounding_service_url + '/models')
+ models = res.json()
+ return models
+
+
+def ground_statement(stmt):
+ """Set grounding for Agents in a given Statement using Gilda.
+
+ This function modifies the original Statement/Agents in place.
+
+ Parameters
+ ----------
+ stmt : indra.statements.Statement
+ A Statement to ground
+ """
+ if stmt.evidence and stmt.evidence[0].text:
+ context = stmt.evidence[0].text
+ else:
+ context = None
+ for agent in stmt.agent_list():
+ if agent is not None and 'TEXT' in agent.db_refs:
+ txt = agent.db_refs['TEXT']
+ resp = requests.post(grounding_service_url + '/ground',
+ json={'text': txt,
+ 'context': context})
+ results = resp.json()
+ if results:
+ db_refs = {'TEXT': txt,
+ results[0]['term']['db']:
+ results[0]['term']['id']}
+ agent.db_refs = db_refs
+ GroundingMapper.standardize_agent_name(agent,
+ standardize_refs=True)
def ground_statements(stmts):
@@ -14,25 +57,8 @@
Parameters
----------
- stmts : list[indra.statements.Statements]
+ stmts : list[indra.statements.Statement]
A list of Statements to ground
"""
for stmt in stmts:
- if stmt.evidence and stmt.evidence[0].text:
- context = stmt.evidence[0].text
- else:
- context = None
- for agent in stmt.agent_list():
- if agent is not None and 'TEXT' in agent.db_refs:
- txt = agent.db_refs['TEXT']
- resp = requests.post(grounding_service_url,
- json={'text': txt,
- 'context': context})
- results = resp.json()
- if results:
- db_refs = {'TEXT': txt,
- results[0]['term']['db']:
- results[0]['term']['id']}
- agent.db_refs = db_refs
- GroundingMapper.standardize_agent_name(agent,
- standardize_refs=True)
+ ground_statement(stmt) |
c7cc0e24ea5d4cbb44665c1267a771f08f1bda4f | cityhallmonitor/signals/handlers.py | cityhallmonitor/signals/handlers.py | from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.utils import timezone
from cityhallmonitor.models import DirtyFieldsModel, \
Matter, MatterAttachment
@receiver(pre_save, sender=DirtyFieldsModel)
def handle_pre_save(sender, instance, *args, **kwargs):
"""Set updated_at timestamp if model is actually dirty"""
if hasattr(sender, 'is_dirty'):
if instance.is_dirty():
instance.updated_at = timezone.now()
@receiver(post_save, sender=DirtyFieldsModel)
def handle_post_save(sender, instance, **kwargs):
"""Reset dirty state, maybe update related Document"""
if hasattr(sender, 'is_dirty'):
if sender == Matter and instance.is_dirty():
for r in instance.matterattachment_set.all():
if hasattr(r, 'document'):
r.document.on_related_update()
elif sender == MatterAttachment and instance.is_dirty():
if hasattr(instance, 'document'):
r.document.on_related_update()
if hasattr(sender, 'reset_state'):
instance.reset_state()
| from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.utils import timezone
from cityhallmonitor.models import DirtyFieldsModel, \
Matter, MatterAttachment, MatterSponsor
@receiver(pre_save, sender=DirtyFieldsModel)
def handle_pre_save(sender, instance, *args, **kwargs):
"""Set updated_at timestamp if model is actually dirty"""
if hasattr(sender, 'is_dirty'):
if instance.is_dirty():
instance.updated_at = timezone.now()
@receiver(post_save, sender=DirtyFieldsModel)
def handle_post_save(sender, instance, **kwargs):
"""Reset dirty state, maybe update related Document"""
if hasattr(sender, 'is_dirty'):
if sender == Matter and instance.is_dirty():
for r in instance.matterattachment_set.all():
if hasattr(r, 'document'):
r.document.on_related_update()
elif sender == MatterAttachment and instance.is_dirty():
if hasattr(instance, 'document'):
r.document.on_related_update()
elif sender == MatterSponsor and instance.is_dirty():
for r in instance.matter.matterattachment_set.all():
if hasattr(r, 'document'):
r.document.on_related_update()
if hasattr(sender, 'reset_state'):
instance.reset_state()
| Add post_save handler for MatterSponsor | Add post_save handler for MatterSponsor
| Python | mit | NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor | ---
+++
@@ -2,7 +2,7 @@
from django.dispatch import receiver
from django.utils import timezone
from cityhallmonitor.models import DirtyFieldsModel, \
- Matter, MatterAttachment
+ Matter, MatterAttachment, MatterSponsor
@receiver(pre_save, sender=DirtyFieldsModel)
@@ -23,9 +23,11 @@
elif sender == MatterAttachment and instance.is_dirty():
if hasattr(instance, 'document'):
r.document.on_related_update()
-
-
-
+ elif sender == MatterSponsor and instance.is_dirty():
+ for r in instance.matter.matterattachment_set.all():
+ if hasattr(r, 'document'):
+ r.document.on_related_update()
+
if hasattr(sender, 'reset_state'):
instance.reset_state()
|
6420eca5e458f981aa9f506bfa6354eba50e1e49 | processing/face_detect.py | processing/face_detect.py | import cv2
class FaceDetector:
def __init__(self, face_cascade_path):
self.faceCascade = cv2.CascadeClassifier(face_cascade_path)
def detect(self, image, scale_factor=1.1, min_neighbors=5, min_size=(30, 30)):
# detect faces in the image
rectangles = self.faceCascade.detectMultiScale(image, scaleFactor=scale_factor,
minNeighbors=min_neighbors, minSize=min_size,
flags=cv2.CASCADE_SCALE_IMAGE)
return rectangles
def HOG(self):
print("HOG still to be implemented")
def removeFace(self, image):
rectangle_dimensions = self.detect(image)
if len(rectangle_dimensions) > 0:
(x, y, w, h) = max(rectangle_dimensions, key=lambda b: (b[2] * b[3]))
# To Do if no face found return error
face = image[y:y + h, x:x + w]
image_copy = image.copy()
image_copy[y:y + h, x:x + w] = 0
return face, image_copy
| import cv2
class FaceDetector:
def __init__(self, face_cascade_path):
self.faceCascade = cv2.CascadeClassifier(face_cascade_path)
def detect(self, image, scale_factor=1.1, min_neighbors=5, min_size=(30, 30)):
rectangles = self.faceCascade.detectMultiScale(image, scaleFactor=scale_factor,
minNeighbors=min_neighbors, minSize=min_size,
flags=cv2.CASCADE_SCALE_IMAGE)
return rectangles
def HOG(self):
print("HOG still to be implemented")
def removeFace(self, image):
rectangle_dimensions = self.detect(image)
if len(rectangle_dimensions) > 0:
(x, y, w, h) = max(rectangle_dimensions, key=lambda b: (b[2] * b[3]))
# To Do if no face found return error
face = image[y:y + h, x:x + w]
image_copy = image.copy()
image_copy[y:y + h, x:x + w] = 0
return face, image_copy
| Align Face and Feature Mapping | Align Face and Feature Mapping
| Python | bsd-3-clause | javaTheHutts/Java-the-Hutts | ---
+++
@@ -6,10 +6,9 @@
self.faceCascade = cv2.CascadeClassifier(face_cascade_path)
def detect(self, image, scale_factor=1.1, min_neighbors=5, min_size=(30, 30)):
- # detect faces in the image
rectangles = self.faceCascade.detectMultiScale(image, scaleFactor=scale_factor,
- minNeighbors=min_neighbors, minSize=min_size,
- flags=cv2.CASCADE_SCALE_IMAGE)
+ minNeighbors=min_neighbors, minSize=min_size,
+ flags=cv2.CASCADE_SCALE_IMAGE)
return rectangles
def HOG(self): |
38ed00b38d9cb005f9b25643afa7ce480da6febe | examples/enable/resize_tool_demo.py | examples/enable/resize_tool_demo.py | """
This demonstrates the most basic drawing capabilities using Enable. A new
component is created and added to a container.
"""
from enable.example_support import DemoFrame, demo_main
from enable.api import Component, Container, Window
from enable.tools.resize_tool import ResizeTool
class Box(Component):
resizable = ""
def _draw_mainlayer(self, gc, view_bounds=None, mode="default"):
with gc:
dx, dy = self.bounds
x, y = self.position
gc.set_fill_color((1.0, 0.0, 0.0, 1.0))
gc.rect(x, y, dx, dy)
gc.fill_path()
class MyFrame(DemoFrame):
def _create_window(self):
box = Box(bounds=[100.0, 100.0], position=[50.0, 50.0])
box.tools.append(ResizeTool(component=box, hotspots=set(["top", "left", "right", "bottom",
"top left", "top right", "bottom left", "bottom right"])))
container = Container(bounds=[500, 500])
container.add(box)
return Window(self, -1, component=container)
if __name__ == "__main__":
# Save demo so that it doesn't get garbage collected when run within
# existing event loop (i.e. from ipython).
demo = demo_main(MyFrame)
| """
This demonstrates the resize tool.
"""
from enable.example_support import DemoFrame, demo_main
from enable.api import Component, Container, Window
from enable.tools.resize_tool import ResizeTool
class Box(Component):
resizable = ""
def _draw_mainlayer(self, gc, view_bounds=None, mode="default"):
with gc:
dx, dy = self.bounds
x, y = self.position
gc.set_fill_color((1.0, 0.0, 0.0, 1.0))
gc.rect(x, y, dx, dy)
gc.fill_path()
class MyFrame(DemoFrame):
def _create_window(self):
box = Box(bounds=[100.0, 100.0], position=[50.0, 50.0])
box.tools.append(ResizeTool(component=box, hotspots=set(["top", "left", "right", "bottom",
"top left", "top right", "bottom left", "bottom right"])))
container = Container(bounds=[500, 500])
container.add(box)
return Window(self, -1, component=container)
if __name__ == "__main__":
# Save demo so that it doesn't get garbage collected when run within
# existing event loop (i.e. from ipython).
demo = demo_main(MyFrame)
| Fix resize tool demo comments. | Fix resize tool demo comments.
| Python | bsd-3-clause | tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable | ---
+++
@@ -1,6 +1,5 @@
"""
-This demonstrates the most basic drawing capabilities using Enable. A new
-component is created and added to a container.
+This demonstrates the resize tool.
"""
from enable.example_support import DemoFrame, demo_main
from enable.api import Component, Container, Window |
b654ce911d458fc623929ac2a2e04c995201eb1e | runtests.py | runtests.py | #!/usr/bin/env python
# Setup Django
from django.conf import settings
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
},
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'localhost',
'OPTIONS': {
'MAX_ENTRIES': 2 ** 32,
},
},
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'ormcache',
'ormcache.tests.testapp',
),
TEST_RUNNER='django_nose.NoseTestSuiteRunner',
)
# Run tests
import sys
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner(verbosity=1)
test_runner.setup_databases()
failures = test_runner.run_tests(['ormcache', ])
if failures:
sys.exit(failures)
| #!/usr/bin/env python
# Django must be set up before we import our libraries and run our tests
from django.conf import settings
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
},
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'localhost',
'OPTIONS': {
'MAX_ENTRIES': 2 ** 32,
},
},
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'ormcache',
'ormcache.tests.testapp',
),
TEST_RUNNER='django_nose.NoseTestSuiteRunner',
)
# Run tests
import sys
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner(verbosity=1)
test_runner.setup_databases()
failures = test_runner.run_tests(['ormcache', ])
if failures:
sys.exit(failures)
| Add clarifying comment about Django setup | Add clarifying comment about Django setup
| Python | mit | educreations/django-ormcache | ---
+++
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# Setup Django
+# Django must be set up before we import our libraries and run our tests
from django.conf import settings
|
6502087c63df816e3a4d4b256af7685f638b477d | account_check/migrations/8.0.0.0/pre-migrate.py | account_check/migrations/8.0.0.0/pre-migrate.py | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
import logging
import openupgradelib
_logger = logging.getLogger(__name__)
def migrate(cr, version):
_logger.info('Migrating account_check from version %s' % version)
cr.execute("""
INSERT INTO account_check (
id, create_uid, create_date, write_date, write_uid,
state, number, issue_date, amount, company_id, user_id, voucher_id,
clearing, bank_id, vat, type,
source_partner_id, destiny_partner_id,
payment_date
)
SELECT id, create_uid, create_date, write_date, write_uid,
state, CAST(number AS INT), date, amount, company_id, user_id, voucher_id,
clearing, bank_id, vat, 'third',
source_partner_id, destiny_partner_id,
clearing_date
FROM account_third_check
""")
pass
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
import logging
import openupgradelib
_logger = logging.getLogger(__name__)
def migrate(cr, version):
_logger.info('Migrating account_check from version %s' % version)
cr.execute("""
INSERT INTO account_check (
id, create_uid, create_date, write_date, write_uid,
state, number, issue_date, amount, company_id, user_id, voucher_id,
clearing, bank_id, vat, type,
source_partner_id, destiny_partner_id,
payment_date
)
SELECT id, create_uid, create_date, write_date, write_uid,
state, CAST(number AS INT), date, amount, company_id, user_id, voucher_id,
clearing, bank_id, vat, 'third',
source_partner_id, destiny_partner_id,
clearing_date
FROM account_third_check
""")
cr.execute("select max(id) FROM account_check")
take_last_check = cr.fetchall()[0][0]
cr.execute("ALTER SEQUENCE account_check_id_seq RESTART WITH %s;", (take_last_check,))
pass
| Set last id in sequence | [FIX] Set last id in sequence
| Python | agpl-3.0 | csrocha/account_check,csrocha/account_check | ---
+++
@@ -25,4 +25,9 @@
clearing_date
FROM account_third_check
""")
+
+ cr.execute("select max(id) FROM account_check")
+ take_last_check = cr.fetchall()[0][0]
+ cr.execute("ALTER SEQUENCE account_check_id_seq RESTART WITH %s;", (take_last_check,))
+
pass |
b4806b4650f576c7b5cd7f33742ccb108e37321c | StartWithPython/StartWithPython/Theory/Loops/Range.py | StartWithPython/StartWithPython/Theory/Loops/Range.py | # -------------------------------------------------------------------------------------------------
# RANGE
# -------------------------------------------------------------------------------------------------
print('\n\t\tRANGE\n')
for x in range(10): # to make an action ('n') times
print("Pippo") # ...
print('')
for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
print(x) # ...
print('')
for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
print(x) # ...
print('') | # -------------------------------------------------------------------------------------------------
# RANGE
# -------------------------------------------------------------------------------------------------
print('\n\t\tRANGE\n')
for x in range(10): # to make an action ('n') times
print("Pippo") # ...
print('') # ...
for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
print(x) # ...
print('') # ...
for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
print(x) # ...
print('') # ...
print(list(range(4))) # create new list from 0 to 3
print(list(range(-6,7,2))) # -6 to +6 by 2
print([[x ** 2, x ** 3] for x in range(4)]) # some more complicated example
print([[x, x / 2, x*2] for x in range(-6, 7, 2) if x > 0]) # ...
| Add some range application with list | Add some range application with list
| Python | mit | CaptainMich/Python_Project | ---
+++
@@ -4,17 +4,20 @@
print('\n\t\tRANGE\n')
-for x in range(10): # to make an action ('n') times
- print("Pippo") # ...
+for x in range(10): # to make an action ('n') times
+ print("Pippo") # ...
+print('') # ...
-print('')
+for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
+ print(x) # ...
+print('') # ...
-for x in range(5, 12): # second example --> (from 'x' to(,) 'y')
- print(x) # ...
+for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
+ print(x) # ...
+print('') # ...
-print('')
-
-for x in range(10, 40, 5): # third example --> (from 'x' to(,) 'y' in steps of(,) 'z')
- print(x) # ...
-
-print('')
+print(list(range(4))) # create new list from 0 to 3
+print(list(range(-6,7,2))) # -6 to +6 by 2
+
+print([[x ** 2, x ** 3] for x in range(4)]) # some more complicated example
+print([[x, x / 2, x*2] for x in range(-6, 7, 2) if x > 0]) # ... |
091f3c6eafcf2041517463e48f7209716a925b9f | website/files/utils.py | website/files/utils.py |
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node settings of the project to copy files to
:param Folder parent: The parent of to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
cloned.versions.add(*src.versions.all())
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
|
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
:param Node target_node: The node to copy files to
:param Folder parent: The parent of to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
cloned = src.clone()
cloned.parent = parent
cloned.target = target_node
cloned.name = name or cloned.name
cloned.copied_from = src
cloned.save()
if src.is_file and src.versions.exists():
cloned.versions.add(*src.versions.all())
most_recent_fileversion = cloned.versions.select_related('region').order_by('-created').first()
if most_recent_fileversion.region != target_node.osfstorage_region:
most_recent_fileversion.region = target_node.osfstorage_region
most_recent_fileversion.save()
if not src.is_file:
for child in src.children:
copy_files(child, target_node, parent=cloned)
return cloned
| Update the most recent fileversions region for copied files across regions | Update the most recent fileversions region for copied files across regions
[#PLAT-1100]
| Python | apache-2.0 | aaxelb/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,adlius/osf.io,adlius/osf.io,cslzchen/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,mfraezz/osf.io,mfraezz/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,felliott/osf.io,mattclark/osf.io,felliott/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,adlius/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,felliott/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,pattisdr/osf.io,caseyrollins/osf.io,adlius/osf.io,pattisdr/osf.io,mattclark/osf.io,caseyrollins/osf.io,aaxelb/osf.io,pattisdr/osf.io | ---
+++
@@ -2,7 +2,7 @@
def copy_files(src, target_node, parent=None, name=None):
"""Copy the files from src to the target node
:param Folder src: The source to copy children from
- :param Node target_node: The node settings of the project to copy files to
+ :param Node target_node: The node to copy files to
:param Folder parent: The parent of to attach the clone of src to, if applicable
"""
assert not parent or not parent.is_file, 'Parent must be a folder'
@@ -17,6 +17,10 @@
if src.is_file and src.versions.exists():
cloned.versions.add(*src.versions.all())
+ most_recent_fileversion = cloned.versions.select_related('region').order_by('-created').first()
+ if most_recent_fileversion.region != target_node.osfstorage_region:
+ most_recent_fileversion.region = target_node.osfstorage_region
+ most_recent_fileversion.save()
if not src.is_file:
for child in src.children: |
7d1f471f9723b7f8c12b5713a1a61f6391665009 | src/load_remote_data.py | src/load_remote_data.py | #!/usr/bin/env python
import os
import requests
# local configuration
remote_data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'remote')
# URLs at which data can be found
csv_url_summary_stats = 'http://dashboard.iatistandard.org/summary_stats.csv'
csv_url_humanitarian_stats = 'http://dev.dashboard.iatistandard.org/humanitarian.csv'
with open(os.path.join(remote_data_path, 'summary_stats.csv'), 'wb') as f:
# load the data to write to the file
# TODO: Add error handling - URL loading
response = requests.get(csv_url_summary_stats)
if not response.ok:
print('There was a problem loading the Summary Statistics data')
# TODO: Add error handling - file writing
f.write(response.text.encode('utf-8'))
with open(os.path.join(remote_data_path, 'humanitarian.csv'), 'wb') as f:
# load the data to write to the file
# TODO: Add error handling - URL loading
response = requests.get(csv_url_humanitarian_stats)
if not response.ok:
print('There was a problem loading the Humanitarian Statistics data')
# TODO: Add error handling - file writing
f.write(response.text.encode('utf-8'))
# TODO: Add mention of __main__ and main()
| #!/usr/bin/env python
import os
import requests
# local configuration
remote_data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'remote')
# URLs at which data can be found
csv_url_summary_stats = 'http://publishingstats.iatistandard.org/summary_stats.csv'
csv_url_humanitarian_stats = 'http://publishingstats.iatistandard.org/humanitarian.csv'
with open(os.path.join(remote_data_path, 'summary_stats.csv'), 'wb') as f:
# load the data to write to the file
# TODO: Add error handling - URL loading
response = requests.get(csv_url_summary_stats)
if not response.ok:
print('There was a problem loading the Summary Statistics data')
# TODO: Add error handling - file writing
f.write(response.text.encode('utf-8'))
with open(os.path.join(remote_data_path, 'humanitarian.csv'), 'wb') as f:
# load the data to write to the file
# TODO: Add error handling - URL loading
response = requests.get(csv_url_humanitarian_stats)
if not response.ok:
print('There was a problem loading the Humanitarian Statistics data')
# TODO: Add error handling - file writing
f.write(response.text.encode('utf-8'))
# TODO: Add mention of __main__ and main()
| Update download links to summary_stats and humanitarian | Update download links to summary_stats and humanitarian
With the split of the dashboard and publishing statistics, `humanitarian.csv` and `summary_stats.csv` will be moving to a different url, this PR points to that. | Python | mit | devinit/grand-bargain-monitoring,devinit/grand-bargain-monitoring,devinit/grand-bargain-monitoring | ---
+++
@@ -9,8 +9,8 @@
# URLs at which data can be found
-csv_url_summary_stats = 'http://dashboard.iatistandard.org/summary_stats.csv'
-csv_url_humanitarian_stats = 'http://dev.dashboard.iatistandard.org/humanitarian.csv'
+csv_url_summary_stats = 'http://publishingstats.iatistandard.org/summary_stats.csv'
+csv_url_humanitarian_stats = 'http://publishingstats.iatistandard.org/humanitarian.csv'
with open(os.path.join(remote_data_path, 'summary_stats.csv'), 'wb') as f: |
8157f0887d5fe9b78f484b5a556555b8ee26145f | fig/cli/formatter.py | fig/cli/formatter.py | from __future__ import unicode_literals
from __future__ import absolute_import
import os
import texttable
def get_tty_width():
tty_size = os.popen('stty size', 'r').read().split()
if len(tty_size) != 2:
return 80
_, width = tty_size
return width
class Formatter(object):
def table(self, headers, rows):
table = texttable.Texttable(max_width=get_tty_width())
table.set_cols_dtype(['t' for h in headers])
table.add_rows([headers] + rows)
table.set_deco(table.HEADER)
table.set_chars(['-', '|', '+', '-'])
return table.draw()
| from __future__ import unicode_literals
from __future__ import absolute_import
import os
import texttable
def get_tty_width():
tty_size = os.popen('stty size', 'r').read().split()
if len(tty_size) != 2:
return 80
_, width = tty_size
return int(width)
class Formatter(object):
def table(self, headers, rows):
table = texttable.Texttable(max_width=get_tty_width())
table.set_cols_dtype(['t' for h in headers])
table.add_rows([headers] + rows)
table.set_deco(table.HEADER)
table.set_chars(['-', '|', '+', '-'])
return table.draw()
| Fix the return value of get_tty_width() it should return an int. | Fix the return value of get_tty_width() it should return an int.
Signed-off-by: Daniel Nephin <6347c07ae509164cffebfb1e2a0d6ed64958db19@gmail.com>
| Python | apache-2.0 | LuisBosquez/docker.github.io,simonista/compose,simonista/compose,jorgeLuizChaves/compose,calou/compose,gtrdotmcs/compose,joeuo/docker.github.io,charleswhchan/compose,ouziel-slama/compose,VinceBarresi/compose,goloveychuk/compose,Dakno/compose,shubheksha/docker.github.io,RobertNorthard/compose,dbdd4us/compose,sanscontext/docker.github.io,browning/compose,schmunk42/compose,unodba/compose,abesto/fig,ain/compose,bdwill/docker.github.io,thaJeztah/docker.github.io,bsmr-docker/compose,aanand/fig,Chouser/compose,iamluc/compose,hypriot/compose,shin-/docker.github.io,troy0820/docker.github.io,bdwill/docker.github.io,funkyfuture/docker-compose,xydinesh/compose,jzwlqx/denverdino.github.io,shin-/docker.github.io,joaofnfernandes/docker.github.io,noironetworks/compose,talolard/compose,Dakno/compose,joeuo/docker.github.io,londoncalling/docker.github.io,kikkomep/compose,tpounds/compose,BSWANG/denverdino.github.io,rillig/docker.github.io,viranch/compose,thieman/compose,jessekl/compose,andrewgee/compose,alexisbellido/docker.github.io,gdevillele/docker.github.io,shin-/docker.github.io,d2bit/compose,xydinesh/compose,moxiegirl/compose,jzwlqx/denverdino.github.io,LuisBosquez/docker.github.io,Katlean/fig,jessekl/compose,dbdd4us/compose,VinceBarresi/compose,ralphtheninja/compose,nerro/compose,calou/compose,josephpage/compose,mnuessler/compose,menglingwei/denverdino.github.io,charleswhchan/compose,phiroict/docker,BSWANG/denverdino.github.io,danix800/docker.github.io,troy0820/docker.github.io,shakamunyi/fig,shin-/docker.github.io,johnstep/docker.github.io,ekristen/compose,noironetworks/compose,andrewgee/compose,menglingwei/denverdino.github.io,mindaugasrukas/compose,alexisbellido/docker.github.io,DoubleMalt/compose,swoopla/compose,LuisBosquez/docker.github.io,ZJaffee/compose,gdevillele/docker.github.io,jeanpralo/compose,gtrdotmcs/compose,genki/compose,joeuo/docker.github.io,alexandrev/compose,ChrisChinchilla/compose,menglingwei/denverdino.github.io,bcicen/fig,sdurrheime
r/compose,KevinGreene/compose,screwgoth/compose,iamluc/compose,tpounds/compose,aduermael/docker.github.io,pspierce/compose,cclauss/compose,Yelp/docker-compose,mark-adams/compose,lukemarsden/compose,nhumrich/compose,artemkaint/compose,saada/compose,JimGalasyn/docker.github.io,anweiss/docker.github.io,benhamill/compose,cgvarela/compose,amitsaha/compose,johnstep/docker.github.io,sanscontext/docker.github.io,dnephin/compose,phiroict/docker,screwgoth/compose,johnstep/docker.github.io,marcusmartins/compose,ouziel-slama/compose,schmunk42/compose,benhamill/compose,denverdino/compose,KalleDK/compose,anweiss/docker.github.io,denverdino/denverdino.github.io,RobertNorthard/compose,gdevillele/docker.github.io,bdwill/docker.github.io,mdaue/compose,funkyfuture/docker-compose,denverdino/docker.github.io,johnstep/docker.github.io,jonaseck2/compose,docker-zh/docker.github.io,aanand/fig,dilgerma/compose,goloveychuk/compose,sanscontext/docker.github.io,jeanpralo/compose,jgrowl/compose,rillig/docker.github.io,glogiotatidis/compose,kikkomep/compose,bfirsh/fig,bfirsh/fig,mindaugasrukas/compose,michael-k/docker-compose,qzio/compose,anweiss/docker.github.io,ggtools/compose,bsmr-docker/compose,bbirand/compose,vdemeester/compose,aduermael/docker.github.io,mark-adams/compose,docker-zh/docker.github.io,aduermael/docker.github.io,danix800/docker.github.io,denverdino/denverdino.github.io,joaofnfernandes/docker.github.io,rstacruz/compose,jiekechoo/compose,jorgeLuizChaves/compose,phiroict/docker,troy0820/docker.github.io,cclauss/compose,brunocascio/compose,johnstep/docker.github.io,denverdino/docker.github.io,KevinGreene/compose,pspierce/compose,thaJeztah/docker.github.io,joeuo/docker.github.io,sebglazebrook/compose,BSWANG/denverdino.github.io,ph-One/compose,browning/compose,JimGalasyn/docker.github.io,feelobot/compose,shin-/compose,londoncalling/docker.github.io,phiroict/docker,bcicen/fig,denverdino/denverdino.github.io,MSakamaki/compose,tiry/compose,mohitsoni/compose,j-fuentes/compose,joeuo/docke
r.github.io,bobphill/compose,mohitsoni/compose,ph-One/compose,anweiss/docker.github.io,phiroict/docker,josephpage/compose,menglingwei/denverdino.github.io,denverdino/docker.github.io,danix800/docker.github.io,shubheksha/docker.github.io,dilgerma/compose,alexisbellido/docker.github.io,sanscontext/docker.github.io,lukemarsden/compose,thaJeztah/compose,zhangspook/compose,dopry/compose,joaofnfernandes/docker.github.io,bobphill/compose,docker/docker.github.io,dockerhn/compose,sebglazebrook/compose,ralphtheninja/compose,au-phiware/compose,d2bit/compose,alexisbellido/docker.github.io,DoubleMalt/compose,Katlean/fig,JimGalasyn/docker.github.io,runcom/compose,viranch/compose,saada/compose,tangkun75/compose,mnowster/compose,qzio/compose,ZJaffee/compose,londoncalling/docker.github.io,cgvarela/compose,thieman/compose,shakamunyi/fig,mrfuxi/compose,jiekechoo/compose,jonaseck2/compose,mbailey/compose,LuisBosquez/docker.github.io,alunduil/fig,KalleDK/compose,mnuessler/compose,philwrenn/compose,dockerhn/compose,mosquito/docker-compose,feelobot/compose,genki/compose,mchasal/compose,joaofnfernandes/docker.github.io,alexisbellido/docker.github.io,hoogenm/compose,aduermael/docker.github.io,amitsaha/compose,BSWANG/denverdino.github.io,dopry/compose,albers/compose,runcom/compose,alunduil/fig,bdwill/docker.github.io,uvgroovy/compose,docker-zh/docker.github.io,tiry/compose,TheDataShed/compose,kojiromike/compose,twitherspoon/compose,nhumrich/compose,Yelp/docker-compose,GM-Alex/compose,talolard/compose,nerro/compose,GM-Alex/compose,joaofnfernandes/docker.github.io,rgbkrk/compose,londoncalling/docker.github.io,TomasTomecek/compose,vlajos/compose,dnephin/compose,thaJeztah/docker.github.io,jzwlqx/denverdino.github.io,docker/docker.github.io,artemkaint/compose,bcicen/fig,sanscontext/docker.github.io,marcusmartins/compose,uvgroovy/compose,docker-zh/docker.github.io,denverdino/denverdino.github.io,Chouser/compose,anweiss/docker.github.io,denverdino/compose,thaJeztah/docker.github.io,philwrenn/compos
e,jgrowl/compose,shubheksha/docker.github.io,shubheksha/docker.github.io,bdwill/docker.github.io,alexandrev/compose,mbailey/compose,zhangspook/compose,thaJeztah/docker.github.io,shubheksha/docker.github.io,mdaue/compose,mchasal/compose,tangkun75/compose,shin-/docker.github.io,rillig/docker.github.io,TheDataShed/compose,mosquito/docker-compose,au-phiware/compose,ekristen/compose,denverdino/docker.github.io,bbirand/compose,moxiegirl/compose,hoogenm/compose,denverdino/docker.github.io,JimGalasyn/docker.github.io,vlajos/compose,jrabbit/compose,docker/docker.github.io,docker/docker.github.io,vdemeester/compose,menglingwei/denverdino.github.io,hypriot/compose,ggtools/compose,troy0820/docker.github.io,j-fuentes/compose,ChrisChinchilla/compose,twitherspoon/compose,denverdino/denverdino.github.io,JimGalasyn/docker.github.io,TomasTomecek/compose,prologic/compose,docker/docker.github.io,mrfuxi/compose,albers/compose,sdurrheimer/compose,michael-k/docker-compose,abesto/fig,mnowster/compose,swoopla/compose,ionrock/compose,MSakamaki/compose,jzwlqx/denverdino.github.io,gdevillele/docker.github.io,londoncalling/docker.github.io,unodba/compose,docker-zh/docker.github.io,BSWANG/denverdino.github.io,danix800/docker.github.io,rstacruz/compose,glogiotatidis/compose,rgbkrk/compose,shin-/compose,lmesz/compose,gdevillele/docker.github.io,jrabbit/compose,jzwlqx/denverdino.github.io,lmesz/compose,ionrock/compose,ain/compose,kojiromike/compose,thaJeztah/compose,prologic/compose,LuisBosquez/docker.github.io,brunocascio/compose,rillig/docker.github.io | ---
+++
@@ -9,7 +9,7 @@
if len(tty_size) != 2:
return 80
_, width = tty_size
- return width
+ return int(width)
class Formatter(object): |
0989682ad858a6f14a1d387c24511b228881d645 | flake8_docstrings.py | flake8_docstrings.py | # -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flakes8
"""
import pep257
__version__ = '0.2.0'
class pep257Checker(object):
"""flake8 needs a class to check python file."""
name = 'pep257'
version = __version__
def __init__(self, tree, filename='(none)', builtins=None):
self.tree = tree
self.filename = filename
def run(self):
"""Use directly check_source api from pep257."""
errors = list()
with open(self.filename, 'r') as handle:
for error in pep257.PEP257Checker().check_source(
handle.read(), self.filename):
errors.append(error)
for error in errors:
yield (error.line, 0, error.message, type(self))
| # -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flakes8
"""
import pep257
__version__ = '0.2.1'
class pep257Checker(object):
"""flake8 needs a class to check python file."""
name = 'pep257'
version = __version__
def __init__(self, tree, filename='(none)', builtins=None):
self.tree = tree
self.filename = filename
def run(self):
"""Use directly check() api from pep257."""
for error in pep257.check([self.filename]):
# Ignore AllError, Environment error.
if isinstance(error, pep257.Error):
yield (error.line, 0, error.message, type(self))
| Use different pep257 entry point | Use different pep257 entry point
The check_source() function of pep257.py does not handle AllError and
EnvironmentError exceptions. We can use instead the check() function
and ignore any errors that do not belong to the pep257.Error class and
thus are of no use to Flake8.
| Python | mit | PyCQA/flake8-docstrings | ---
+++
@@ -4,7 +4,7 @@
"""
import pep257
-__version__ = '0.2.0'
+__version__ = '0.2.1'
class pep257Checker(object):
@@ -19,12 +19,8 @@
self.filename = filename
def run(self):
- """Use directly check_source api from pep257."""
- errors = list()
- with open(self.filename, 'r') as handle:
- for error in pep257.PEP257Checker().check_source(
- handle.read(), self.filename):
- errors.append(error)
-
- for error in errors:
- yield (error.line, 0, error.message, type(self))
+ """Use directly check() api from pep257."""
+ for error in pep257.check([self.filename]):
+ # Ignore AllError, Environment error.
+ if isinstance(error, pep257.Error):
+ yield (error.line, 0, error.message, type(self)) |
dafbe424546020cc5a53eae5d10391d3dbf81870 | test/style_test.py | test/style_test.py | import glob
import os
import pep8
class TestCodeFormat:
def test_pep8(self):
def match(*p):
s = ['theanets'] + list(p) + ['*.py']
return glob.glob(os.path.join(*s))
pep8style = pep8.StyleGuide(config_file='setup.cfg')
result = pep8style.check_files(match() + match('layers'))
assert result.total_errors == 0
| import glob
import os
import pep8
class TestCodeFormat:
def test_pep8(self):
def match(*p):
s = ['downhill'] + list(p) + ['*.py']
return glob.glob(os.path.join(*s))
pep8style = pep8.StyleGuide(config_file='setup.cfg')
result = pep8style.check_files(match())
assert result.total_errors == 0
| Update style test to work with this package. | Update style test to work with this package.
| Python | mit | rodrigob/downhill,lmjohns3/downhill | ---
+++
@@ -6,8 +6,8 @@
class TestCodeFormat:
def test_pep8(self):
def match(*p):
- s = ['theanets'] + list(p) + ['*.py']
+ s = ['downhill'] + list(p) + ['*.py']
return glob.glob(os.path.join(*s))
pep8style = pep8.StyleGuide(config_file='setup.cfg')
- result = pep8style.check_files(match() + match('layers'))
+ result = pep8style.check_files(match())
assert result.total_errors == 0 |
93dfefff12569c180e20fefc9380358753c6771e | molo/core/tests/test_import_from_git_view.py | molo/core/tests/test_import_from_git_view.py | import pytest
from django.test import TestCase
from django.core.urlresolvers import reverse
from molo.core.tests.base import MoloTestCaseMixin
@pytest.mark.django_db
class TestImportFromGit(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
self.user = self.login()
def test_wagtail_has_import_menu_item(self):
response = self.client.get(reverse("import-from-git"))
self.assertContains(response, 'Import content from Git')
| import pytest
from django.test import TestCase
from django.core.urlresolvers import reverse
from molo.core.tests.base import MoloTestCaseMixin
@pytest.mark.django_db
class TestImportFromGit(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
self.user = self.login()
def test_wagtail_has_import_menu_item(self):
response = self.client.get(reverse('import-from-git'))
self.assertContains(
response, 'Import content from a Universal Core site')
| Fix import UI django view's tests | Fix import UI django view's tests
| Python | bsd-2-clause | praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo | ---
+++
@@ -14,5 +14,6 @@
self.user = self.login()
def test_wagtail_has_import_menu_item(self):
- response = self.client.get(reverse("import-from-git"))
- self.assertContains(response, 'Import content from Git')
+ response = self.client.get(reverse('import-from-git'))
+ self.assertContains(
+ response, 'Import content from a Universal Core site') |
a8b9e999a34039d64a2fe27b53a938feeb07a013 | flask_app.py | flask_app.py | from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
| from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
| Put API under /api/ by default | Put API under /api/ by default
| Python | bsd-3-clause | talavis/kimenu | ---
+++
@@ -9,19 +9,19 @@
cors = CORS(app, resources={r"/*": {"origins": "*"}})
-@app.route('/')
+@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
-@app.route('/restaurant/')
+@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
-@app.route('/restaurant/<name>/')
+@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name) |
d2eb23a0dcf768d3d47966122b6f3717009eb2fd | Python/ds.py | Python/ds.py | """
This file includes several data structures used in LeetCode question.
"""
# Definition for a list node.
class ListNode(object):
def __init__(self, n):
self.val = n
self.next = None
def createLinkedList(nodelist):
#type nodelist: list[int/float]
#rtype: head of linked list
linkedList = ListNode(0)
head = linkedList
for val in nodelist:
linkedList.next = ListNode(val)
linkedList = linkedList.next
return head.next
def printList(head):
if not head:
print "head is None!\n"
return
else:
while head:
print head.val
head = head.next
return
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
#TODO finish createBinaryTree
def createBinaryTree(nodelist):
root = TreeNode(0)
l = len(nodelist)
if l == 0:
return None
if __name__ == '__main__':
# main()
n = [1,2,3]
bst = createBinaryTree(n) | """
This file includes several data structures used in LeetCode question.
"""
# Definition for a list node.
class ListNode(object):
def __init__(self, n):
self.val = n
self.next = None
def createLinkedList(nodelist):
#type nodelist: list[int/float]
#rtype: head of linked list
linkedList = ListNode(0)
head = linkedList
for val in nodelist:
linkedList.next = ListNode(val)
linkedList = linkedList.next
return head.next
def printList(head):
if not head:
print "head is None!\n"
return
else:
while head:
print head.val
head = head.next
print "END OF LIST"
return
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
#TODO finish createBinaryTree
def createBinaryTree(nodelist):
root = TreeNode(0)
l = len(nodelist)
if l == 0:
return None
if __name__ == '__main__':
# main()
n = [1,2,3]
bst = createBinaryTree(n) | Add one more line of comment in printList function. | Add one more line of comment in printList function.
| Python | mit | comicxmz001/LeetCode,comicxmz001/LeetCode | ---
+++
@@ -25,6 +25,7 @@
while head:
print head.val
head = head.next
+ print "END OF LIST"
return
# Definition for a binary tree node. |
a3bb5ca86cbba530718e55f97407d7c5d3ad0a57 | stagecraft/apps/organisation/admin.py | stagecraft/apps/organisation/admin.py | from django.contrib import admin
from .models import NodeType, Node
class NodeTypeAdmin(admin.ModelAdmin):
list_display = ('name',)
class NodeAdmin(admin.ModelAdmin):
list_display = ('name', 'abbreviation',)
admin.site.register(NodeType, NodeTypeAdmin)
admin.site.register(Node, NodeAdmin)
| from django.contrib import admin
from .models import NodeType, Node
class NodeTypeAdmin(admin.ModelAdmin):
list_display = ('name',)
class ParentInline(admin.TabularInline):
model = Node.parents.through
verbose_name = 'Parent relationship'
verbose_name_plural = 'Parents'
extra = 1
fk_name = 'from_node'
class NodeAdmin(admin.ModelAdmin):
list_display = ('name', 'abbreviation', 'typeOf',)
list_filter = ('typeOf',)
search_fields = ('name', 'abbreviation',)
inlines = (ParentInline,)
exclude = ('parents',)
admin.site.register(NodeType, NodeTypeAdmin)
admin.site.register(Node, NodeAdmin)
| Improve Django Admin interface for Organisations | Improve Django Admin interface for Organisations
We have a lot of nodes now so adding some way of filtering by type and
searching by name seemed wise.
I've switched the multiselect box to pick parents for an interface that
is a little more sane when we have over a 1000 possible parents.
| Python | mit | alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft | ---
+++
@@ -7,8 +7,20 @@
list_display = ('name',)
+class ParentInline(admin.TabularInline):
+ model = Node.parents.through
+ verbose_name = 'Parent relationship'
+ verbose_name_plural = 'Parents'
+ extra = 1
+ fk_name = 'from_node'
+
+
class NodeAdmin(admin.ModelAdmin):
- list_display = ('name', 'abbreviation',)
+ list_display = ('name', 'abbreviation', 'typeOf',)
+ list_filter = ('typeOf',)
+ search_fields = ('name', 'abbreviation',)
+ inlines = (ParentInline,)
+ exclude = ('parents',)
admin.site.register(NodeType, NodeTypeAdmin) |
a01d306a887eabc912a9e57af0ad862e6c45f652 | saleor/cart/__init__.py | saleor/cart/__init__.py | from __future__ import unicode_literals
from django.utils.translation import pgettext
from satchless import cart
from satchless.item import ItemList, ClassifyingPartitioner
from ..product.models import DigitalShip
class ShippedGroup(ItemList):
'''
Group for shippable products.
'''
pass
class DigitalGroup(ItemList):
'''
Group for digital products.
'''
pass
class CartPartitioner(ClassifyingPartitioner):
'''
Dividing cart into groups.
'''
def classify(self, item):
if isinstance(item.product, DigitalShip):
return 'digital'
return 'shippable'
def get_partition(self, classifier, items):
if classifier == 'digital':
return DigitalGroup(items)
return ShippedGroup(items)
class Cart(cart.Cart):
'''
Contains cart items. Serialized instance of cart is saved into django
session.
'''
timestamp = None
billing_address = None
def __unicode__(self):
return pgettext(
'Shopping cart',
'Your cart (%(cart_count)s)') % {'cart_count': self.count()}
def clear(self):
self._state = []
| from __future__ import unicode_literals
from django.utils.translation import pgettext
from satchless import cart
from satchless.item import ItemList, ClassifyingPartitioner
from ..product.models import DigitalShip
class ShippedGroup(ItemList):
'''
Group for shippable products.
'''
pass
class DigitalGroup(ItemList):
'''
Group for digital products.
'''
pass
class CartPartitioner(ClassifyingPartitioner):
'''
Dividing cart into groups.
'''
def classify(self, item):
if isinstance(item.product, DigitalShip):
return 'digital'
return 'shippable'
def get_partition(self, classifier, items):
if classifier == 'digital':
return DigitalGroup(items)
return ShippedGroup(items)
class Cart(cart.Cart):
'''
Contains cart items. Serialized instance of cart is saved into django
session.
'''
timestamp = None
billing_address = None
def __unicode__(self):
return pgettext(
'Shopping cart',
'Your cart (%(cart_count)s)') % {'cart_count': self.count()}
| Use clear cart method from satchless | Use clear cart method from satchless
https://github.com/mirumee/satchless/commit/3acaa8f6a27d9ab259a2d66fc3f7416a18fab1ad
This reverts commit 2ad16c44adb20e9ba023e873149d67068504c34c.
| Python | bsd-3-clause | dashmug/saleor,taedori81/saleor,avorio/saleor,avorio/saleor,tfroehlich82/saleor,arth-co/saleor,laosunhust/saleor,dashmug/saleor,rchav/vinerack,arth-co/saleor,Drekscott/Motlaesaleor,taedori81/saleor,rchav/vinerack,josesanch/saleor,arth-co/saleor,KenMutemi/saleor,paweltin/saleor,KenMutemi/saleor,jreigel/saleor,Drekscott/Motlaesaleor,paweltin/saleor,rodrigozn/CW-Shop,jreigel/saleor,car3oon/saleor,mociepka/saleor,HyperManTT/ECommerceSaleor,itbabu/saleor,josesanch/saleor,itbabu/saleor,tfroehlich82/saleor,hongquan/saleor,spartonia/saleor,josesanch/saleor,avorio/saleor,UITools/saleor,paweltin/saleor,laosunhust/saleor,laosunhust/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,spartonia/saleor,rchav/vinerack,hongquan/saleor,arth-co/saleor,HyperManTT/ECommerceSaleor,spartonia/saleor,avorio/saleor,UITools/saleor,mociepka/saleor,maferelo/saleor,mociepka/saleor,jreigel/saleor,rodrigozn/CW-Shop,taedori81/saleor,maferelo/saleor,paweltin/saleor,Drekscott/Motlaesaleor,Drekscott/Motlaesaleor,laosunhust/saleor,car3oon/saleor,UITools/saleor,KenMutemi/saleor,dashmug/saleor,taedori81/saleor,rodrigozn/CW-Shop,hongquan/saleor,maferelo/saleor,UITools/saleor,car3oon/saleor,UITools/saleor,itbabu/saleor,spartonia/saleor | ---
+++
@@ -48,6 +48,3 @@
return pgettext(
'Shopping cart',
'Your cart (%(cart_count)s)') % {'cart_count': self.count()}
-
- def clear(self):
- self._state = [] |
5b50b96b35c678ca17b069630875a9d86e2cbca3 | scripts/i18n/commons.py | scripts/i18n/commons.py | # -*- coding: utf-8 -*-
msg = {
'en': {
'commons-file-moved' : u'[[:File:%s|File]] moved to [[:commons:File:%s|commons]].',
'commons-file-now-available' : u'File is now available on Wikimedia Commons.',
'commons-nowcommons-template' : 'en': u'{{subst:ncd|%s}}',
},
'qqq': {
'commons-file-now-available': u'Edit summary when the bot has moved a file to Commons and adds the NowCommons template.',
'commons-file-moved': u'Edit summary when the bot replaces usage of an image available under a different name.',
'commons-nowcommons-template' : 'en': u'The template to be added by the bot when a file is available at Commons. %s is the filename at Commons.',
},
}
| # -*- coding: utf-8 -*-
msg = {
'en': {
'commons-file-moved' : u'[[:File:%s|File]] moved to [[:commons:File:%s|commons]].',
'commons-file-now-available' : u'File is now available on Wikimedia Commons.',
},
'qqq': {
'commons-file-now-available' : u'Edit summary when the bot has moved a file to Commons and adds the NowCommons template.',
'commons-file-moved' : u'Edit summary when the bot replaces usage of an image available under a different name.',
},
}
| Remove the template for now. | Remove the template for now.
git-svn-id: 9a050473c2aca1e14f53d73349e19b938c2cf203@9344 6a7f98fc-eeb0-4dc1-a6e2-c2c589a08aa6
| Python | mit | legoktm/pywikipedia-rewrite | ---
+++
@@ -3,11 +3,9 @@
'en': {
'commons-file-moved' : u'[[:File:%s|File]] moved to [[:commons:File:%s|commons]].',
'commons-file-now-available' : u'File is now available on Wikimedia Commons.',
- 'commons-nowcommons-template' : 'en': u'{{subst:ncd|%s}}',
},
'qqq': {
- 'commons-file-now-available': u'Edit summary when the bot has moved a file to Commons and adds the NowCommons template.',
- 'commons-file-moved': u'Edit summary when the bot replaces usage of an image available under a different name.',
- 'commons-nowcommons-template' : 'en': u'The template to be added by the bot when a file is available at Commons. %s is the filename at Commons.',
+ 'commons-file-now-available' : u'Edit summary when the bot has moved a file to Commons and adds the NowCommons template.',
+ 'commons-file-moved' : u'Edit summary when the bot replaces usage of an image available under a different name.',
},
} |
11ab81f67df3f7579cb1b85d87499480c3cea351 | wafer/pages/serializers.py | wafer/pages/serializers.py | from rest_framework import serializers
from wafer.pages.models import Page
class PageSerializer(serializers.ModelSerializer):
class Meta:
model = Page
def create(self, validated_data):
# TODO: Implement
return super(PageSerializer, self).create(validated_data)
| from rest_framework import serializers
from reversion import revisions
from wafer.pages.models import Page
class PageSerializer(serializers.ModelSerializer):
class Meta:
model = Page
exclude = ('_content_rendered',)
@revisions.create_revision()
def create(self, validated_data):
revisions.set_comment("Created via REST api")
return super(PageSerializer, self).create(validated_data)
@revisions.create_revision()
def update(self, page, validated_data):
revisions.set_comment("Changed via REST api")
page.parent = validated_data['parent']
page.content = validated_data['content']
page.save()
return page
| Add create & update methods for page API | Add create & update methods for page API
| Python | isc | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer | ---
+++
@@ -1,4 +1,5 @@
from rest_framework import serializers
+from reversion import revisions
from wafer.pages.models import Page
@@ -7,7 +8,17 @@
class Meta:
model = Page
+ exclude = ('_content_rendered',)
+ @revisions.create_revision()
def create(self, validated_data):
- # TODO: Implement
+ revisions.set_comment("Created via REST api")
return super(PageSerializer, self).create(validated_data)
+
+ @revisions.create_revision()
+ def update(self, page, validated_data):
+ revisions.set_comment("Changed via REST api")
+ page.parent = validated_data['parent']
+ page.content = validated_data['content']
+ page.save()
+ return page |
9eafc01ef8260a313f2e214924cfd5bda706c1c0 | cactusbot/handler.py | cactusbot/handler.py | """Handle handlers."""
import logging
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.handlers = handlers
def handle(self, event, packet):
"""Handle incoming data."""
for handler in self.handlers:
if hasattr(handler, "on_" + event):
response = ""
try:
response = getattr(handler, "on_" + event)(packet)
except Exception as e:
print("Uh oh!")
print(e)
else:
if response is StopIteration:
break
yield response
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
| """Handle handlers."""
import logging
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
self.logger = logging.getLogger(__name__)
self.handlers = handlers
def handle(self, event, packet):
"""Handle incoming data."""
for handler in self.handlers:
if hasattr(handler, "on_" + event):
response = ""
try:
response = getattr(handler, "on_" + event)(packet)
except Exception as e:
self.logger.warning(e)
else:
if response is StopIteration:
break
yield response
class Handler(object):
"""Handler."""
def __init__(self):
self.logger = logging.getLogger(__name__)
| Add exception logging to Handlers | Add exception logging to Handlers
| Python | mit | CactusDev/CactusBot | ---
+++
@@ -1,11 +1,15 @@
"""Handle handlers."""
import logging
+
class Handlers(object):
"""Handlers."""
def __init__(self, *handlers):
+
+ self.logger = logging.getLogger(__name__)
+
self.handlers = handlers
def handle(self, event, packet):
@@ -17,8 +21,7 @@
try:
response = getattr(handler, "on_" + event)(packet)
except Exception as e:
- print("Uh oh!")
- print(e)
+ self.logger.warning(e)
else:
if response is StopIteration:
break
@@ -26,6 +29,6 @@
class Handler(object):
"""Handler."""
-
+
def __init__(self):
self.logger = logging.getLogger(__name__) |
3a30074c13d1740ae24c8e381bd9d170ed6b6808 | wafer/sponsors/views.py | wafer/sponsors/views.py | from django.views.generic.list import ListView
from django.views.generic import DetailView
from rest_framework import viewsets
from rest_framework.permissions import DjangoModelPermissionsOrAnonReadOnly
from wafer.sponsors.models import Sponsor, SponsorshipPackage
from wafer.sponsors.serializers import SponsorSerializer, PackageSerializer
class ShowSponsors(ListView):
template_name = 'wafer.sponsors/sponsors.html'
model = Sponsor
def get_queryset(self):
return Sponsor.objects.all().order_by('packages')
class SponsorView(DetailView):
template_name = 'wafer.sponsors/sponsor.html'
model = Sponsor
class ShowPackages(ListView):
template_name = 'wafer.sponsors/packages.html'
model = SponsorshipPackage
class SponsorViewSet(viewsets.ModelViewSet):
"""API endpoint for users."""
queryset = Sponsor.objects.all()
serializer_class = SponsorSerializer
permission_classes = (DjangoModelPermissionsOrAnonReadOnly, )
class PackageViewSet(viewsets.ModelViewSet):
"""API endpoint for users."""
queryset = SponsorshipPackage.objects.all()
serializer_class = PackageSerializer
permission_classes = (DjangoModelPermissionsOrAnonReadOnly, )
| from django.views.generic.list import ListView
from django.views.generic import DetailView
from rest_framework import viewsets
from rest_framework.permissions import DjangoModelPermissionsOrAnonReadOnly
from wafer.sponsors.models import Sponsor, SponsorshipPackage
from wafer.sponsors.serializers import SponsorSerializer, PackageSerializer
class ShowSponsors(ListView):
template_name = 'wafer.sponsors/sponsors.html'
model = Sponsor
def get_queryset(self):
return Sponsor.objects.all().order_by('packages', 'id')
class SponsorView(DetailView):
template_name = 'wafer.sponsors/sponsor.html'
model = Sponsor
class ShowPackages(ListView):
template_name = 'wafer.sponsors/packages.html'
model = SponsorshipPackage
class SponsorViewSet(viewsets.ModelViewSet):
"""API endpoint for users."""
queryset = Sponsor.objects.all()
serializer_class = SponsorSerializer
permission_classes = (DjangoModelPermissionsOrAnonReadOnly, )
class PackageViewSet(viewsets.ModelViewSet):
"""API endpoint for users."""
queryset = SponsorshipPackage.objects.all()
serializer_class = PackageSerializer
permission_classes = (DjangoModelPermissionsOrAnonReadOnly, )
| Order sponsors by ID on the sponsors page, too | Order sponsors by ID on the sponsors page, too
| Python | isc | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer | ---
+++
@@ -13,7 +13,7 @@
model = Sponsor
def get_queryset(self):
- return Sponsor.objects.all().order_by('packages')
+ return Sponsor.objects.all().order_by('packages', 'id')
class SponsorView(DetailView): |
801a209eb208c629d4ea84199b7779e8c6a0396d | tests/sentry/interfaces/user/tests.py | tests/sentry/interfaces/user/tests.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import mock
from exam import fixture
from sentry.testutils import TestCase
from sentry.interfaces import User
from sentry.models import Event
class UserTest(TestCase):
@fixture
def event(self):
return mock.Mock(spec=Event())
@fixture
def interface(self):
return User(id=1, email='lol@example.com', favorite_color='brown')
def test_serialize_behavior(self):
assert self.interface.serialize() == {
'id': 1,
'username': None,
'email': 'lol@example.com',
'ip_address': None,
'data': {'favorite_color': 'brown'}
}
@mock.patch('sentry.interfaces.render_to_string')
def test_to_html(self, render_to_string):
self.interface.to_html(self.event)
render_to_string.assert_called_once_with('sentry/partial/interfaces/user.html', {
'is_public': False,
'event': self.event,
'user_ip_address': None,
'user_id': 1,
'user_username': None,
'user_email': 'lol@example.com',
'user_data': {'favorite_color': 'brown'},
})
def test_to_html_public(self):
result = self.interface.to_html(self.event, is_public=True)
assert result == ''
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
import mock
from exam import fixture
from sentry.testutils import TestCase
from sentry.interfaces import User
from sentry.models import Event
class UserTest(TestCase):
@fixture
def event(self):
return mock.Mock(spec=Event())
@fixture
def interface(self):
return User(id=1, email='lol@example.com', favorite_color='brown')
def test_serialize_behavior(self):
assert self.interface.serialize() == {
'id': 1,
'username': None,
'email': 'lol@example.com',
'ip_address': None,
'data': {'favorite_color': 'brown'}
}
@mock.patch('sentry.interfaces.render_to_string')
def test_to_html(self, render_to_string):
interface = User(**self.interface.serialize())
interface.to_html(self.event)
render_to_string.assert_called_once_with('sentry/partial/interfaces/user.html', {
'is_public': False,
'event': self.event,
'user_ip_address': None,
'user_id': 1,
'user_username': None,
'user_email': 'lol@example.com',
'user_data': {'favorite_color': 'brown'},
})
def test_to_html_public(self):
result = self.interface.to_html(self.event, is_public=True)
assert result == ''
| Fix text to prove that behavior | Fix text to prove that behavior
| Python | bsd-3-clause | imankulov/sentry,BayanGroup/sentry,songyi199111/sentry,ifduyue/sentry,kevinlondon/sentry,JackDanger/sentry,zenefits/sentry,pauloschilling/sentry,kevinlondon/sentry,TedaLIEz/sentry,looker/sentry,camilonova/sentry,gg7/sentry,daevaorn/sentry,gg7/sentry,ngonzalvez/sentry,JamesMura/sentry,mitsuhiko/sentry,mvaled/sentry,wong2/sentry,Natim/sentry,nicholasserra/sentry,wujuguang/sentry,ngonzalvez/sentry,ewdurbin/sentry,jean/sentry,JamesMura/sentry,nicholasserra/sentry,BayanGroup/sentry,beeftornado/sentry,boneyao/sentry,llonchj/sentry,gencer/sentry,ifduyue/sentry,JackDanger/sentry,kevinastone/sentry,Kryz/sentry,imankulov/sentry,korealerts1/sentry,drcapulet/sentry,beeftornado/sentry,Natim/sentry,1tush/sentry,boneyao/sentry,vperron/sentry,mvaled/sentry,wong2/sentry,looker/sentry,kevinastone/sentry,songyi199111/sentry,drcapulet/sentry,kevinlondon/sentry,vperron/sentry,argonemyth/sentry,felixbuenemann/sentry,daevaorn/sentry,JamesMura/sentry,wujuguang/sentry,Kryz/sentry,gencer/sentry,alexm92/sentry,wong2/sentry,JTCunning/sentry,felixbuenemann/sentry,pauloschilling/sentry,1tush/sentry,zenefits/sentry,Kryz/sentry,ifduyue/sentry,mvaled/sentry,felixbuenemann/sentry,argonemyth/sentry,mvaled/sentry,fuziontech/sentry,camilonova/sentry,gencer/sentry,fuziontech/sentry,jean/sentry,songyi199111/sentry,zenefits/sentry,jean/sentry,jokey2k/sentry,looker/sentry,fotinakis/sentry,BuildingLink/sentry,hongliang5623/sentry,mvaled/sentry,TedaLIEz/sentry,looker/sentry,fuziontech/sentry,BuildingLink/sentry,zenefits/sentry,nicholasserra/sentry,fotinakis/sentry,fotinakis/sentry,alexm92/sentry,alexm92/sentry,mitsuhiko/sentry,jokey2k/sentry,ewdurbin/sentry,gg7/sentry,wujuguang/sentry,BuildingLink/sentry,gencer/sentry,korealerts1/sentry,gencer/sentry,kevinastone/sentry,camilonova/sentry,ifduyue/sentry,boneyao/sentry,hongliang5623/sentry,JTCunning/sentry,jokey2k/sentry,JTCunning/sentry,jean/sentry,hongliang5623/sentry,looker/sentry,1tush/sentry,llonchj/sentry,korealerts1/sentry,ifduy
ue/sentry,JackDanger/sentry,ngonzalvez/sentry,BuildingLink/sentry,fotinakis/sentry,TedaLIEz/sentry,ewdurbin/sentry,daevaorn/sentry,Natim/sentry,BuildingLink/sentry,jean/sentry,daevaorn/sentry,drcapulet/sentry,vperron/sentry,BayanGroup/sentry,mvaled/sentry,JamesMura/sentry,argonemyth/sentry,JamesMura/sentry,llonchj/sentry,beeftornado/sentry,imankulov/sentry,pauloschilling/sentry,zenefits/sentry | ---
+++
@@ -30,7 +30,8 @@
@mock.patch('sentry.interfaces.render_to_string')
def test_to_html(self, render_to_string):
- self.interface.to_html(self.event)
+ interface = User(**self.interface.serialize())
+ interface.to_html(self.event)
render_to_string.assert_called_once_with('sentry/partial/interfaces/user.html', {
'is_public': False,
'event': self.event, |
48e63186b3f3912134c167a6f74ffe8a98de3b16 | testrunner.py | testrunner.py | #!/usr/bin/env python
import unittest
import glob
import sys
def create_test_suite(mod):
if mod is None:
test_file_strings = glob.glob('jinger/test/test_*.py')
module_strings = [str[0:len(str)-3].replace('/', '.') for str in test_file_strings]
else:
module_strings = ['jinger.test.test_%s' % (mod)]
suites = [unittest.defaultTestLoader.loadTestsFromName(s) for s in module_strings]
testSuite = unittest.TestSuite(suites)
return testSuite
if __name__ == '__main__':
try:
mod = sys.argv[1]
except IndexError as e:
mod = None
testSuite = create_test_suite(mod);
text_runner = unittest.TextTestRunner().run(testSuite)
| #!/usr/bin/env python
import unittest
import glob
import sys
def create_test_suite(mod):
if mod is None:
test_file_strings = glob.glob('jinger/test/test_*.py')
module_strings = [str[0:len(str)-3].replace('/', '.') for str in test_file_strings]
else:
module_strings = ['jinger.test.test_%s' % (mod)]
suites = [unittest.defaultTestLoader.loadTestsFromName(s) for s in module_strings]
testSuite = unittest.TestSuite(suites)
return testSuite
if __name__ == '__main__':
try:
mod = sys.argv[1]
except IndexError as e:
mod = None
testSuite = create_test_suite(mod);
text_runner = unittest.TextTestRunner().run(testSuite)
| Add newline at the end of file | Add newline at the end of file
| Python | mit | naiquevin/jinger,naiquevin/jinger | ---
+++
@@ -23,3 +23,4 @@
mod = None
testSuite = create_test_suite(mod);
text_runner = unittest.TextTestRunner().run(testSuite)
+ |
3befcbaf3a78a46edc31cc1910fcd8e0a9381102 | money_conversion/money.py | money_conversion/money.py |
class Money(object):
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency.upper()
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
| from currency_rates import rates
class Money(object):
def __init__(self, amount, currency):
self.amount = amount
self.currency = currency.upper()
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
def to_currency(self, new_currency):
new_currency = new_currency.split('_')[1].upper()
amount = self.amount
base_currency_rates = rates.get(self.currency)
new_amount = amount * base_currency_rates.get(new_currency)
return Money(new_amount, new_currency)
| Add to_currency method in order to be able to convert to a new currency | Add to_currency method in order to be able to convert to a new currency
| Python | mit | mdsrosa/money-conversion-py | ---
+++
@@ -1,3 +1,5 @@
+from currency_rates import rates
+
class Money(object):
@@ -7,3 +9,12 @@
def __repr__(self):
return "%.2f %s" % (self.amount, self.currency)
+
+ def to_currency(self, new_currency):
+ new_currency = new_currency.split('_')[1].upper()
+ amount = self.amount
+ base_currency_rates = rates.get(self.currency)
+
+ new_amount = amount * base_currency_rates.get(new_currency)
+
+ return Money(new_amount, new_currency) |
503e8f4ba3cbf388ffd9e88d58f783349d8354a3 | froide/document/views.py | froide/document/views.py | from elasticsearch_dsl.query import Q
from froide.helper.search.views import BaseSearchView
from froide.helper.search.filters import BaseSearchFilterSet
from filingcabinet.models import Page
from .documents import PageDocument
class DocumentFilterset(BaseSearchFilterSet):
query_fields = ['title^5', 'description^3', 'content']
class DocumentSearch(BaseSearchView):
search_name = 'document'
template_name = 'document/search.html'
object_template = 'document/result_item.html'
model = Page
document = PageDocument
filterset = DocumentFilterset
search_url_name = 'document-search'
select_related = ('document',)
def get_base_search(self):
# FIXME: add team
q = Q('term', public=True)
if self.request.user.is_authenticated:
q |= Q('term', user=self.request.user.pk)
return super().get_base_search().filter(q)
| from elasticsearch_dsl.query import Q
from froide.helper.search.views import BaseSearchView
from froide.helper.search.filters import BaseSearchFilterSet
from filingcabinet.models import Page
from .documents import PageDocument
class DocumentFilterset(BaseSearchFilterSet):
query_fields = ['title^3', 'description^2', 'content']
class DocumentSearch(BaseSearchView):
search_name = 'document'
template_name = 'document/search.html'
object_template = 'document/result_item.html'
model = Page
document = PageDocument
filterset = DocumentFilterset
search_url_name = 'document-search'
select_related = ('document',)
def get_base_search(self):
# FIXME: add team
q = Q('term', public=True)
if self.request.user.is_authenticated:
q |= Q('term', user=self.request.user.pk)
return super().get_base_search().filter(q)
| Reduce title, description search boost on document | Reduce title, description search boost on document | Python | mit | fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide | ---
+++
@@ -9,7 +9,7 @@
class DocumentFilterset(BaseSearchFilterSet):
- query_fields = ['title^5', 'description^3', 'content']
+ query_fields = ['title^3', 'description^2', 'content']
class DocumentSearch(BaseSearchView): |
5ffc1cc1d65b1e1bb364a8270b2a6a563c362733 | tests/test_pubtator.py | tests/test_pubtator.py |
import kindred
def test_pubtator_pmid():
corpus = kindred.pubtator.load(19894120)
assert isinstance(corpus,kindred.Corpus)
docCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert docCount == 2
assert relationCount == 0
assert entityCount == 16
def test_pubtator_pmids():
corpus = kindred.pubtator.load([19894120,19894121])
assert isinstance(corpus,kindred.Corpus)
docCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert docCount == 4
assert relationCount == 0
assert entityCount == 38
if __name__ == '__main__':
test_pubtator()
|
import kindred
def test_pubtator_pmid():
corpus = kindred.pubtator.load(19894120)
assert isinstance(corpus,kindred.Corpus)
docCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert docCount == 2
assert relationCount == 0
assert entityCount > 0
def test_pubtator_pmids():
corpus = kindred.pubtator.load([19894120,19894121])
assert isinstance(corpus,kindred.Corpus)
docCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert docCount == 4
assert relationCount == 0
assert entityCount > 0
if __name__ == '__main__':
test_pubtator()
| Simplify pubtator test to only check for entities, not exact number | Simplify pubtator test to only check for entities, not exact number
| Python | mit | jakelever/kindred,jakelever/kindred | ---
+++
@@ -12,7 +12,7 @@
assert docCount == 2
assert relationCount == 0
- assert entityCount == 16
+ assert entityCount > 0
def test_pubtator_pmids():
corpus = kindred.pubtator.load([19894120,19894121])
@@ -25,7 +25,7 @@
assert docCount == 4
assert relationCount == 0
- assert entityCount == 38
+ assert entityCount > 0
if __name__ == '__main__':
test_pubtator() |
1119a249d2e5dcbb2dd965a6e162d24e390d77f5 | website/prereg/utils.py | website/prereg/utils.py | from modularodm import Q
PREREG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'erpc': 'Election Research Preacceptance Competition',
}
def drafts_for_user(user, campaign):
from osf import models # noqa
PREREG_CHALLENGE_METASCHEMA = get_prereg_schema(campaign)
return models.DraftRegistration.objects.filter(
registration_schema=PREREG_CHALLENGE_METASCHEMA,
approval=None,
registered_node=None,
branched_from__in=models.AbstractNode.subselect.filter(
is_deleted=False,
contributor__admin=True,
contributor__user=user).values_list('id', flat=True))
def get_prereg_schema(campaign='prereg'):
from website.models import MetaSchema # noqa
if campaign not in PREREG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(PREREG_CAMPAIGNS.keys())))
schema_name = PREREG_CAMPAIGNS[campaign]
return MetaSchema.find_one(
Q('name', 'eq', schema_name) &
Q('schema_version', 'eq', 2)
)
| from modularodm import Q
PREREG_CAMPAIGNS = {
'prereg': 'Prereg Challenge',
'erpc': 'Election Research Preacceptance Competition',
}
def drafts_for_user(user, campaign):
from osf import models # noqa
PREREG_CHALLENGE_METASCHEMA = get_prereg_schema(campaign)
return models.DraftRegistration.objects.filter(
registration_schema=PREREG_CHALLENGE_METASCHEMA,
approval=None,
registered_node=None,
branched_from__in=models.AbstractNode.objects.filter(
is_deleted=False,
contributor__admin=True,
contributor__user=user).values_list('id', flat=True))
def get_prereg_schema(campaign='prereg'):
from website.models import MetaSchema # noqa
if campaign not in PREREG_CAMPAIGNS:
raise ValueError('campaign must be one of: {}'.format(', '.join(PREREG_CAMPAIGNS.keys())))
schema_name = PREREG_CAMPAIGNS[campaign]
return MetaSchema.find_one(
Q('name', 'eq', schema_name) &
Q('schema_version', 'eq', 2)
)
| Remove reference to manager that no longer exists | Remove reference to manager that no longer exists
| Python | apache-2.0 | chennan47/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,binoculars/osf.io,hmoco/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,cslzchen/osf.io,sloria/osf.io,acshi/osf.io,mattclark/osf.io,hmoco/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,caneruguz/osf.io,Nesiehr/osf.io,acshi/osf.io,icereval/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,adlius/osf.io,crcresearch/osf.io,baylee-d/osf.io,saradbowman/osf.io,baylee-d/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,erinspace/osf.io,leb2dg/osf.io,caneruguz/osf.io,Nesiehr/osf.io,felliott/osf.io,cslzchen/osf.io,baylee-d/osf.io,icereval/osf.io,mfraezz/osf.io,sloria/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,adlius/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,hmoco/osf.io,caseyrollins/osf.io,mattclark/osf.io,aaxelb/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,chennan47/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,hmoco/osf.io,binoculars/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,felliott/osf.io,cslzchen/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,erinspace/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,mfraezz/osf.io,cwisecarver/osf.io,caneruguz/osf.io,felliott/osf.io,acshi/osf.io,saradbowman/osf.io,erinspace/osf.io,adlius/osf.io,icereval/osf.io,mattclark/osf.io,leb2dg/osf.io,monikagrabowska/osf.io,chennan47/osf.io,laurenrevere/osf.io,chrisseto/osf.io,pattisdr/osf.io,chrisseto/osf.io,acshi/osf.io,cslzchen/osf.io,crcresearch/osf.io,caseyrollins/osf.io,sloria/osf.io,caseyrollins/osf.io,binoculars/osf.io,leb2dg/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io | ---
+++
@@ -14,7 +14,7 @@
registration_schema=PREREG_CHALLENGE_METASCHEMA,
approval=None,
registered_node=None,
- branched_from__in=models.AbstractNode.subselect.filter(
+ branched_from__in=models.AbstractNode.objects.filter(
is_deleted=False,
contributor__admin=True,
contributor__user=user).values_list('id', flat=True)) |
121c76f6af1987ba8ebef4f506604d37e6608a64 | scalaBee.py | scalaBee.py | #!/usr/bin/env python
## Arguments: numerOfTests program arg1Init-arg1Final arg2Init-arg2Final arg3Init-arg3Final...
## Ex: ./scalaBee 2 ./examples/omp_pi 1,2,4,8 100000,1000000,10000000,100000000
# Importing everything needed
import os
import sys
## Showing initial message
print "=================\nStarting ScalaBee\n=================\n"
## Getting Parameters
numberOfTests=sys.argv[1]
program=sys.argv[2]
param1=sys.argv[3]
param2=sys.argv[4]
problemSize=param2
threads=param1
print "Program:\t\t" +program
print "Number of Tests:\t" + numberOfTests
print "Number of threads:\t" + threads
print "Problem Size:\t\t" + problemSize | #!/usr/bin/env python
# Arguments: numerOfTests program arg1Init-arg1Final arg2Init-arg2Final arg3Init-arg3Final...
# Ex: python scalaBee.py 2 ./examples/omp_pi 1,2,4,8 100000,1000000,10000000,100000000
# Importing everything needed
import os, sys, time
## Showing initial message
print "=================\nStarting ScalaBee\n=================\n"
## Getting Parameters
numberOfTests=int(sys.argv[1])
program=sys.argv[2]
param1=sys.argv[3]
param2=sys.argv[4]
problemSize=param2.split(",")
threads=param1.split(",")
print "Program:\t\t" + program
print "Number of Tests:\t%d" % numberOfTests
print "Number of threads:\t", threads
print "Problem Size:\t\t", problemSize
print "\n"
# Running program and measuring time
from subprocess import call
for i in range(len(problemSize)):
for j in range(len(threads)):
start_time = time.time()
for k in range(numberOfTests):
call([program, threads[j] , problemSize[i]])
elapsed_time = time.time() - start_time
print "Elapsed time: %.3fs\n" % elapsed_time | ADD - Python script looping correctly | ADD - Python script looping correctly
| Python | mit | danielholanda/ScalaBee,danielholanda/ScalaBee | ---
+++
@@ -1,27 +1,37 @@
#!/usr/bin/env python
-## Arguments: numerOfTests program arg1Init-arg1Final arg2Init-arg2Final arg3Init-arg3Final...
-## Ex: ./scalaBee 2 ./examples/omp_pi 1,2,4,8 100000,1000000,10000000,100000000
+# Arguments: numerOfTests program arg1Init-arg1Final arg2Init-arg2Final arg3Init-arg3Final...
+# Ex: python scalaBee.py 2 ./examples/omp_pi 1,2,4,8 100000,1000000,10000000,100000000
# Importing everything needed
-import os
-import sys
+import os, sys, time
## Showing initial message
print "=================\nStarting ScalaBee\n=================\n"
## Getting Parameters
-
-
-numberOfTests=sys.argv[1]
+numberOfTests=int(sys.argv[1])
program=sys.argv[2]
param1=sys.argv[3]
param2=sys.argv[4]
-problemSize=param2
-threads=param1
+problemSize=param2.split(",")
+threads=param1.split(",")
-print "Program:\t\t" +program
-print "Number of Tests:\t" + numberOfTests
-print "Number of threads:\t" + threads
-print "Problem Size:\t\t" + problemSize
+print "Program:\t\t" + program
+print "Number of Tests:\t%d" % numberOfTests
+print "Number of threads:\t", threads
+print "Problem Size:\t\t", problemSize
+print "\n"
+
+# Running program and measuring time
+
+from subprocess import call
+
+for i in range(len(problemSize)):
+ for j in range(len(threads)):
+ start_time = time.time()
+ for k in range(numberOfTests):
+ call([program, threads[j] , problemSize[i]])
+ elapsed_time = time.time() - start_time
+ print "Elapsed time: %.3fs\n" % elapsed_time |
4607e0d837829621a2da32581137cc6dcab306f5 | nix/tests.py | nix/tests.py | __author__ = 'gicmo'
import unittest
import nix.core
class TestFile(unittest.TestCase):
def setUp(self):
self.nix_file = nix.core.File.open('test.h5')
assert(self.nix_file.version == '1.0')
def basic_test(self):
b = self.nix_file.create_block('foo', 'bar')
assert(b)
assert(len(self.nix_file.blocks()) > 0)
d = b.create_data_array('foo', 'bar')
assert b
assert(len(b.data_arrays()) > 0) | __author__ = 'gicmo'
import unittest
import nix.core
class TestFile(unittest.TestCase):
def setUp(self):
self.nix_file = nix.core.File.open('test.h5')
assert(self.nix_file.version == '1.0')
def basic_test(self):
b = self.nix_file.create_block('foo', 'bar')
assert(b)
assert(len(self.nix_file.blocks()) > 0)
d = b.create_data_array('foo', 'bar')
assert b
assert(len(b.data_arrays()) > 0)
d.label = "test_label"
assert(d.label == "test_label")
d.label = None
assert(d.label == None) | Add a test for nix.core.DataArray.label | [test] Add a test for nix.core.DataArray.label
| Python | bsd-3-clause | stoewer/nixpy,stoewer/nixpy | ---
+++
@@ -15,3 +15,7 @@
d = b.create_data_array('foo', 'bar')
assert b
assert(len(b.data_arrays()) > 0)
+ d.label = "test_label"
+ assert(d.label == "test_label")
+ d.label = None
+ assert(d.label == None) |
b20e236bd40c0d3c1d06aa1393f02b98e13e58bb | subscriptions/management/commands/add_missed_call_service_audio_notification_to_active_subscriptions.py | subscriptions/management/commands/add_missed_call_service_audio_notification_to_active_subscriptions.py | from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand, CommandError
from subscriptions.models import Subscription
class Command(BaseCommand):
help = ("Active subscription holders need to be informed via audio file "
"about the new missed call service.")
def handle(self, *args, **options):
self.stdout.write("Processing active subscriptions ...")
count = 0
try:
active_subscriptions_list = list(
Subscription.objects.filter(active=True))
except ObjectDoesNotExist:
self.stdout.write("No active subscriptions found")
if len(active_subscriptions_list) > 0:
for active_subscription in active_subscriptions_list:
# Add audio file to subscription meta_data. Not sure how we'll
# handle translations here.
if (active_subscription.metadata is not None and
"welcome_message" not in active_subscription.metadata):
active_subscription["audo_file_url"] = "audio_file_url"
count += 1
if count > 0:
self.stdout.write(
"Update {} subscriptions with voice notes".format(count))
else:
self.stdout.write(
"No subscriptions updated with audio file notes")
| from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
from subscriptions.models import Subscription
class Command(BaseCommand):
help = ("Active subscription holders need to be informed via audio file "
"about the new missed call service.")
def handle(self, *args, **options):
self.stdout.write("Processing active subscriptions ...")
count = 0
try:
active_subscriptions_list = list(
Subscription.objects.filter(active=True))
except ObjectDoesNotExist:
self.stdout.write("No active subscriptions found")
if len(active_subscriptions_list) > 0:
for active_subscription in active_subscriptions_list:
# Add audio file to subscription meta_data. Not sure how we'll
# handle translations here.
if (active_subscription.metadata is not None and
"welcome_message" not in active_subscription.metadata):
active_subscription["audio_file_url"] = "audio_file_url"
count += 1
if count > 0:
self.stdout.write(
"Update {} subscriptions with voice notes".format(count))
else:
self.stdout.write(
"No subscriptions updated with audio file notes")
| Remove unused import and fix typo | Remove unused import and fix typo
| Python | bsd-3-clause | praekelt/seed-staged-based-messaging,praekelt/seed-stage-based-messaging,praekelt/seed-stage-based-messaging | ---
+++
@@ -1,5 +1,5 @@
from django.core.exceptions import ObjectDoesNotExist
-from django.core.management.base import BaseCommand, CommandError
+from django.core.management.base import BaseCommand
from subscriptions.models import Subscription
@@ -22,7 +22,7 @@
# handle translations here.
if (active_subscription.metadata is not None and
"welcome_message" not in active_subscription.metadata):
- active_subscription["audo_file_url"] = "audio_file_url"
+ active_subscription["audio_file_url"] = "audio_file_url"
count += 1
if count > 0:
self.stdout.write( |
c61929f0d0d8dbf53ef3c9ff2a98cf8f249bfca4 | handlers/base_handler.py | handlers/base_handler.py | from collections import OrderedDict
class BaseHandler:
def __init__(self, file, file_name):
self.file = file
self.file_name = file_name
self.info = OrderedDict()
def read(self, offset, size):
if offset < 0:
raise IndexError("File offset must be greater than 0")
if offset + size >= len(self.file):
raise IndexError("Cannot read beyond the end of the file")
return self.file[offset:offset + size]
| from collections import OrderedDict
class BaseHandler:
def __init__(self, file, file_name):
self.file = file
self.file_name = file_name
self.info = OrderedDict()
def read(self, offset, size):
return self.file[offset:offset + size]
| Revert "Add bounds checking to BaseHandler.read()" | Revert "Add bounds checking to BaseHandler.read()"
This reverts commit 045ead44ef69d6ebf2cb0dddf084762efcc62995.
| Python | mit | drx/rom-info | ---
+++
@@ -8,10 +8,4 @@
self.info = OrderedDict()
def read(self, offset, size):
- if offset < 0:
- raise IndexError("File offset must be greater than 0")
-
- if offset + size >= len(self.file):
- raise IndexError("Cannot read beyond the end of the file")
-
return self.file[offset:offset + size] |
6f87c12306e0daaae2bcee3da3229f34fa7f464c | src/reduce_framerate.py | src/reduce_framerate.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone_camera framerate to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.callback, queue_size=1)
self.image_pub = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
self.bridge = CvBridge()
rospy.logdebug("Subscribed to /ardrone_camera/image_raw.")
self.count = 0
def callback(self, ros_data):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
try:
image = self.bridge.imgmsg_to_cv2(ros_data, "bgr8")
self.image_pub.publish(self.bridge.cv2_to_imgmsg(image, "bgr8"))
except CvBridgeError as e:
rospy.logerr(e)
self.count += 1
def main():
"""Initialize and cleanup ROS node."""
rospy.init_node("image_feature", anonymous=True)
ImageFeature()
rospy.loginfo("Starting feature detection.")
rospy.spin()
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.image_callback,
queue_size=1)
self.image_pub = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_pub.publish(frame)
self.count += 1
def main():
"""Initialize and cleanup ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
| Change docstrings and function names. | Change docstrings and function names.
| Python | mit | masasin/spirit,masasin/spirit | ---
+++
@@ -3,12 +3,11 @@
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
-Reduce /ardrone_camera framerate to 2 Hz.
+Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
-from cv_bridge import CvBridge, CvBridgeError
class ImageFeature(object):
@@ -17,35 +16,30 @@
"""
def __init__(self):
- self.subscriber = rospy.Subscriber("/ardrone/image_raw",
- Image, self.callback, queue_size=1)
+ self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
+ Image, self.image_callback,
+ queue_size=1)
self.image_pub = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
- self.bridge = CvBridge()
- rospy.logdebug("Subscribed to /ardrone_camera/image_raw.")
+ rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
- def callback(self, ros_data):
+ def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
- try:
- image = self.bridge.imgmsg_to_cv2(ros_data, "bgr8")
- self.image_pub.publish(self.bridge.cv2_to_imgmsg(image, "bgr8"))
- except CvBridgeError as e:
- rospy.logerr(e)
-
+ self.image_pub.publish(frame)
self.count += 1
def main():
"""Initialize and cleanup ROS node."""
- rospy.init_node("image_feature", anonymous=True)
+ rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
- rospy.loginfo("Starting feature detection.")
+ rospy.loginfo("Reducing framerate")
rospy.spin()
|
f106a434df84497e12cfbdf1e693e28b6c567711 | kubespawner/utils.py | kubespawner/utils.py | """
Misc. general utility functions, not tied to Kubespawner directly
"""
from concurrent.futures import ThreadPoolExecutor
import random
from jupyterhub.utils import DT_MIN, DT_MAX, DT_SCALE
from tornado import gen, ioloop
from traitlets.config import SingletonConfigurable
class SingletonExecutor(SingletonConfigurable, ThreadPoolExecutor):
"""
Simple wrapper to ThreadPoolExecutor that is also a singleton.
We want one ThreadPool that is used by all the spawners, rather
than one ThreadPool per spawner!
"""
pass
@gen.coroutine
def exponential_backoff(func, fail_message, timeout=10, *args, **kwargs):
loop = ioloop.IOLoop.current()
start_tic = loop.time()
dt = DT_MIN
while True:
if (loop.time() - start_tic) > timeout:
# We time out!
break
if func(*args, **kwargs):
return
else:
yield gen.sleep(dt)
# Add some random jitter to improve performance
# This makes sure that we don't overload any single iteration
# of the tornado loop with too many things
# See https://www.awsarchitectureblog.com/2015/03/backoff.html
# for a good example of why and how this helps
dt = min(DT_MAX, (1 + random.random()) * (dt * DT_SCALE))
raise TimeoutError(fail_message)
| """
Misc. general utility functions, not tied to Kubespawner directly
"""
from concurrent.futures import ThreadPoolExecutor
import random
from jupyterhub.utils import DT_MIN, DT_MAX, DT_SCALE
from tornado import gen, ioloop
from traitlets.config import SingletonConfigurable
class SingletonExecutor(SingletonConfigurable, ThreadPoolExecutor):
"""
Simple wrapper to ThreadPoolExecutor that is also a singleton.
We want one ThreadPool that is used by all the spawners, rather
than one ThreadPool per spawner!
"""
pass
@gen.coroutine
def exponential_backoff(pass_func, fail_message, timeout=10, *args, **kwargs):
"""
Exponentially backoff until pass_func is true.
This function will wait with exponential backoff + random jitter for as
many iterations as needed, with maximum timeout timeout. If pass_func is
still returning false at the end of timeout, a TimeoutError will be raised.
*args and **kwargs are passed to pass_func.
"""
loop = ioloop.IOLoop.current()
start_tic = loop.time()
dt = DT_MIN
while True:
if (loop.time() - start_tic) > timeout:
# We time out!
break
if pass_func(*args, **kwargs):
return
else:
yield gen.sleep(dt)
# Add some random jitter to improve performance
# This makes sure that we don't overload any single iteration
# of the tornado loop with too many things
# See https://www.awsarchitectureblog.com/2015/03/backoff.html
# for a good example of why and how this helps
dt = min(DT_MAX, (1 + random.random()) * (dt * DT_SCALE))
raise TimeoutError(fail_message)
| Add docstrings to exponential backoff | Add docstrings to exponential backoff
| Python | bsd-3-clause | yuvipanda/jupyterhub-kubernetes-spawner,jupyterhub/kubespawner | ---
+++
@@ -18,7 +18,16 @@
pass
@gen.coroutine
-def exponential_backoff(func, fail_message, timeout=10, *args, **kwargs):
+def exponential_backoff(pass_func, fail_message, timeout=10, *args, **kwargs):
+ """
+ Exponentially backoff until pass_func is true.
+
+ This function will wait with exponential backoff + random jitter for as
+ many iterations as needed, with maximum timeout timeout. If pass_func is
+ still returning false at the end of timeout, a TimeoutError will be raised.
+
+ *args and **kwargs are passed to pass_func.
+ """
loop = ioloop.IOLoop.current()
start_tic = loop.time()
dt = DT_MIN
@@ -26,7 +35,7 @@
if (loop.time() - start_tic) > timeout:
# We time out!
break
- if func(*args, **kwargs):
+ if pass_func(*args, **kwargs):
return
else:
yield gen.sleep(dt) |
61d42efa009525e5efd90d147ba70e038f978ae3 | Lib/sublime_lib/__init__.py | Lib/sublime_lib/__init__.py | import sublime_plugin
from sublime import Window, View
class WindowAndTextCommand(sublime_plugin.WindowCommand, sublime_plugin.TextCommand):
"""A class to derive from when using a Window- and a TextCommand in one class
(e.g. when you make a build system that should/could also be calles from the command
palette).
Defines both self.view and self.window.
Since this class derives from both Window- and a TextCommand it is also callable
with the known methods, like ``window.run_command("window_and_text")``.
I defined a dummy ``run`` method to prevent parameters from raising an exception so
this command call does exactly nothing.
Still a better method than having the parent class (the command you will define)
derive from three classes with the limitation that this class must be the first one
(the *Command classes do not use super() for multi-inheritance support; neither do I
but apparently I have reasons).
"""
def __init__(self, param):
# no super() call! this would get the references confused
if isinstance(param, Window):
self.view = param.active_view()
self.window = param
self._window_command = True # probably called from build system
elif isinstance(param, View):
self.view = param
self.window = param.window()
self._window_command = False
else:
raise TypeError("Something really bad happend and you are responsible")
def run(self, *args, **kwargs):
pass
| Add WindowAndTextCommand class to sublime_lib | Add WindowAndTextCommand class to sublime_lib
Probably has limited use but w/e.
| Python | mit | SublimeText/PackageDev,SublimeText/AAAPackageDev,SublimeText/AAAPackageDev | ---
+++
@@ -0,0 +1,35 @@
+import sublime_plugin
+from sublime import Window, View
+
+
+class WindowAndTextCommand(sublime_plugin.WindowCommand, sublime_plugin.TextCommand):
+ """A class to derive from when using a Window- and a TextCommand in one class
+ (e.g. when you make a build system that should/could also be calles from the command
+ palette).
+
+ Defines both self.view and self.window.
+
+ Since this class derives from both Window- and a TextCommand it is also callable
+ with the known methods, like ``window.run_command("window_and_text")``.
+ I defined a dummy ``run`` method to prevent parameters from raising an exception so
+ this command call does exactly nothing.
+ Still a better method than having the parent class (the command you will define)
+ derive from three classes with the limitation that this class must be the first one
+ (the *Command classes do not use super() for multi-inheritance support; neither do I
+ but apparently I have reasons).
+ """
+ def __init__(self, param):
+ # no super() call! this would get the references confused
+ if isinstance(param, Window):
+ self.view = param.active_view()
+ self.window = param
+ self._window_command = True # probably called from build system
+ elif isinstance(param, View):
+ self.view = param
+ self.window = param.window()
+ self._window_command = False
+ else:
+ raise TypeError("Something really bad happend and you are responsible")
+
+ def run(self, *args, **kwargs):
+ pass | |
ea22192c9debe171db5d4b6b83d581fe079d6fa4 | ipython/profile_bots/startup/05-import-company.py | ipython/profile_bots/startup/05-import-company.py | """Set up access to important employer data for IPython"""
from dataclasses import dataclass
@dataclass
class EmployerData:
name: str
wwts = EmployerData('wwts')
| """Set up access to important employer data for IPython"""
from dataclasses import dataclass
from tools.issues import issues
@dataclass
class EmployerData:
name: str
wwts = EmployerData('wwts')
def issue_branch(issue):
name = issues.one(issue).branch_name()
print(name)
| Add functuion to ibots to name a branch | Add functuion to ibots to name a branch
| Python | mit | jalanb/jab,jalanb/dotjab,jalanb/jab,jalanb/dotjab | ---
+++
@@ -1,9 +1,16 @@
"""Set up access to important employer data for IPython"""
from dataclasses import dataclass
+
+from tools.issues import issues
@dataclass
class EmployerData:
name: str
wwts = EmployerData('wwts')
+
+
+def issue_branch(issue):
+ name = issues.one(issue).branch_name()
+ print(name) |
9e131c863c7ff147b95a016b0dfd52c03c60341e | tests/test_cmd_write.py | tests/test_cmd_write.py | from cfgen import cfgen
from nose.tools import assert_equals
import os
def setup():
os.chdir("test_dir")
clean()
def test_cmd_write():
cfgen.cmd_write("test.cfg")
with open("test.cfg") as actual, open("test.cfg.expected") as expected:
actual_lines = actual.read().splitlines()
expected_lines = expected.read().splitlines()
assert_equals(len(actual_lines), len(expected_lines))
for line_number in range(0, len(actual_lines)):
assert_equals(actual_lines[line_number], expected_lines[line_number])
def clean():
if os.path.isfile("test.cfg"):
os.remove("test.cfg")
if os.path.isfile("test.cfg.metaconfig.cache"):
os.remove("test.cfg.metaconfig.cache")
| from cfgen import cfgen
from nose.tools import assert_equals
import os
def setup():
test_root_dir = os.path.dirname(os.path.abspath(__file__))
os.chdir(test_root_dir + "/test_dir")
clean()
def test_cmd_write():
cfgen.cmd_write("test.cfg")
with open("test.cfg") as actual, open("test.cfg.expected") as expected:
actual_lines = actual.read().splitlines()
expected_lines = expected.read().splitlines()
assert_equals(len(actual_lines), len(expected_lines))
for line_number in range(0, len(actual_lines)):
assert_equals(actual_lines[line_number], expected_lines[line_number])
def clean():
if os.path.isfile("test.cfg"):
os.remove("test.cfg")
if os.path.isfile("test.cfg.metaconfig.cache"):
os.remove("test.cfg.metaconfig.cache")
| Fix running tests form command line | Fix running tests form command line
| Python | mit | rzhilkibaev/cfgen | ---
+++
@@ -3,7 +3,8 @@
import os
def setup():
- os.chdir("test_dir")
+ test_root_dir = os.path.dirname(os.path.abspath(__file__))
+ os.chdir(test_root_dir + "/test_dir")
clean()
|
b435bc206bfa6dc6654c5a904363faedc856835d | tests/test_flask_get.py | tests/test_flask_get.py | import unittest
from flask import Flask
from flask.ext.autodoc import Autodoc
class TestAutodocWithFlask(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
self.autodoc = Autodoc(self.app)
@self.app.route('/')
@self.autodoc.doc()
def index():
"""Returns a hello world message"""
return 'Hello World!'
self.client = self.app.test_client()
def test_html(self):
@self.app.route('/docs')
def html_docs():
return self.autodoc.html()
response = self.client.get('/docs')
self.assertEqual(response.status_code, 200)
| import unittest
from flask import Flask
from flask.ext.autodoc import Autodoc
class TestAutodocWithFlask(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
self.autodoc = Autodoc(self.app)
@self.app.route('/')
@self.autodoc.doc()
def index():
"""Returns a hello world message"""
return 'Hello World!'
self.client = self.app.test_client()
def test_html(self):
@self.app.route('/docs')
def html_docs():
return self.autodoc.html()
response = self.client.get('/docs')
self.assertEqual(response.status_code, 200)
def test_json(self):
@self.app.route('/docs')
def json_docs():
return self.autodoc.json()
response = self.client.get('/docs')
self.assertEqual(response.status_code, 200)
| Add a test for json retrieval. | Add a test for json retrieval.
| Python | mit | jwg4/flask-autodoc,jwg4/flask-autodoc | ---
+++
@@ -24,3 +24,11 @@
response = self.client.get('/docs')
self.assertEqual(response.status_code, 200)
+
+ def test_json(self):
+ @self.app.route('/docs')
+ def json_docs():
+ return self.autodoc.json()
+
+ response = self.client.get('/docs')
+ self.assertEqual(response.status_code, 200) |
dc887df974cc9a060b048543d6280c5492ef8ac8 | main/main.py | main/main.py | #_*_ coding: UTF-8 _*_
from flask import render_template
from application import app
import data_models
import views
import funds
import projects
import grants
import pledges
import suppliers
import supplier_funds
import internal_transfers
import purchases
import users
import roles
import partners
import foreign_transfer
import committees
@app.route('/')
def home():
model = data_models.Model(None)
links = views.view_links(None,
('Committee', 'Show Committees'),
('Supplier', 'Show Suppliers'),
('User', 'Show Users'))
return render_template('layout.html', title='DashBoard', user=views.view_user_controls(model), links=links)
projects.add_rules(app)
pledges.add_rules(app)
supplier_funds.add_rules(app)
users.add_rules(app)
partners.add_rules(app)
| #_*_ coding: UTF-8 _*_
from flask import render_template
from application import app
import db
import data_models
import views
import properties
import renderers
import funds
import projects
import grants
import pledges
import suppliers
import supplier_funds
import internal_transfers
import purchases
import users
import roles
import partners
import foreign_transfer
import committees
audit_fields = [
properties.DateProperty('timestamp'),
properties.KeyProperty('entity', title_of=lambda e: e.key.kind()),
properties.StringProperty('message'),
properties.KeyProperty('user')
]
@app.route('/')
def home():
model = data_models.Model(None)
links = views.view_links(None,
('Committee', 'Show Committees'),
('Supplier', 'Show Suppliers'),
('User', 'Show Users'))
audit_list = db.AuditRecord.query().order(-db.AuditRecord.timestamp).iter(limit = 10)
sub_heading = renderers.sub_heading('Recent Activity')
table = views.view_entity_list(audit_list, audit_fields, selectable=False)
content = renderers.render_div(sub_heading, table)
return render_template('layout.html', title='DashBoard', user=views.view_user_controls(model), links=links,
content=content)
projects.add_rules(app)
pledges.add_rules(app)
supplier_funds.add_rules(app)
users.add_rules(app)
partners.add_rules(app)
| Add receent activity to dashboard | Add receent activity to dashboard
| Python | mit | keith-lewis100/pont-workbench,keith-lewis100/pont-workbench,keith-lewis100/pont-workbench | ---
+++
@@ -3,8 +3,11 @@
from flask import render_template
from application import app
+import db
import data_models
import views
+import properties
+import renderers
import funds
import projects
@@ -20,6 +23,13 @@
import foreign_transfer
import committees
+audit_fields = [
+ properties.DateProperty('timestamp'),
+ properties.KeyProperty('entity', title_of=lambda e: e.key.kind()),
+ properties.StringProperty('message'),
+ properties.KeyProperty('user')
+]
+
@app.route('/')
def home():
model = data_models.Model(None)
@@ -27,7 +37,12 @@
('Committee', 'Show Committees'),
('Supplier', 'Show Suppliers'),
('User', 'Show Users'))
- return render_template('layout.html', title='DashBoard', user=views.view_user_controls(model), links=links)
+ audit_list = db.AuditRecord.query().order(-db.AuditRecord.timestamp).iter(limit = 10)
+ sub_heading = renderers.sub_heading('Recent Activity')
+ table = views.view_entity_list(audit_list, audit_fields, selectable=False)
+ content = renderers.render_div(sub_heading, table)
+ return render_template('layout.html', title='DashBoard', user=views.view_user_controls(model), links=links,
+ content=content)
projects.add_rules(app)
pledges.add_rules(app) |
9cb485e97873eff66ba283f30765bb9c66a3c864 | djangae/core/management/__init__.py | djangae/core/management/__init__.py | import argparse
import djangae.sandbox as sandbox
def execute_from_command_line(argv=None):
"""Wraps Django's `execute_from_command_line` to initialize a djangae
sandbox before running a management command.
Note: The '--sandbox' arg must come first. All other args are forwarded to
Django as normal.
"""
parser = argparse.ArgumentParser(prog='manage.py')
parser.add_argument(
'--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
parser.add_argument('args', nargs=argparse.REMAINDER)
namespace = parser.parse_args(argv[1:])
django_argv = ['manage.py'] + namespace.args
with sandbox.activate(namespace.sandbox, add_sdk_to_path=True):
import django.core.management as django_management # Now on the path
django_management.execute_from_command_line(django_argv)
| import sys
import argparse
import djangae.sandbox as sandbox
def execute_from_command_line(argv=None):
"""Wraps Django's `execute_from_command_line` to initialize a djangae
sandbox before running a management command.
Note: The '--sandbox' arg must come first. All other args are forwarded to
Django as normal.
"""
argv = argv or sys.argv
parser = argparse.ArgumentParser(prog='manage.py')
parser.add_argument(
'--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
parser.add_argument('args', nargs=argparse.REMAINDER)
namespace = parser.parse_args(argv[1:])
django_argv = ['manage.py'] + namespace.args
with sandbox.activate(namespace.sandbox, add_sdk_to_path=True):
import django.core.management as django_management # Now on the path
django_management.execute_from_command_line(django_argv)
| Support no-args for djangae.core.management.execute_from_commandline - matches django implementation. | Support no-args for djangae.core.management.execute_from_commandline - matches django implementation.
| Python | bsd-3-clause | kirberich/djangae,nealedj/djangae,asendecka/djangae,stucox/djangae,martinogden/djangae,chargrizzle/djangae,stucox/djangae,armirusco/djangae,leekchan/djangae,armirusco/djangae,trik/djangae,jscissr/djangae,grzes/djangae,SiPiggles/djangae,trik/djangae,kirberich/djangae,armirusco/djangae,trik/djangae,asendecka/djangae,stucox/djangae,pablorecio/djangae,wangjun/djangae,pablorecio/djangae,potatolondon/djangae,martinogden/djangae,jscissr/djangae,kirberich/djangae,potatolondon/djangae,martinogden/djangae,leekchan/djangae,grzes/djangae,SiPiggles/djangae,chargrizzle/djangae,jscissr/djangae,nealedj/djangae,chargrizzle/djangae,asendecka/djangae,grzes/djangae,pablorecio/djangae,b-cannon/my_djae,wangjun/djangae,SiPiggles/djangae,leekchan/djangae,nealedj/djangae,wangjun/djangae | ---
+++
@@ -1,3 +1,4 @@
+import sys
import argparse
import djangae.sandbox as sandbox
@@ -10,6 +11,7 @@
Note: The '--sandbox' arg must come first. All other args are forwarded to
Django as normal.
"""
+ argv = argv or sys.argv
parser = argparse.ArgumentParser(prog='manage.py')
parser.add_argument(
'--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys()) |
eaea466e29725c04ccb31a24807668dee1a09a91 | courses/developingapps/python/devenv/server.py | courses/developingapps/python/devenv/server.py |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
self.wfile.write('Hello GCP dev!')
return
def run():
print('Server is starting...')
server_address = ('0.0.0.0', 80)
server = HTTPServer(server_address, SimpleHTTPRequestHandler)
print('Started. Press Ctrl + C to stop')
server.serve_forever()
if __name__ == '__main__':
run() |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
except ImportError:
from http.server import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
self.wfile.write(b'Hello GCP dev!')
return
def run():
print('Server is starting...')
server_address = ('0.0.0.0', 80)
server = HTTPServer(server_address, SimpleHTTPRequestHandler)
print('Started. Press Ctrl + C to stop')
server.serve_forever()
if __name__ == '__main__':
run()
| Fix ImportError and use bytes in outstream | Fix ImportError and use bytes in outstream
| Python | apache-2.0 | turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,GoogleCloudPlatform/training-data-analyst,turbomanage/training-data-analyst,GoogleCloudPlatform/training-data-analyst | ---
+++
@@ -14,14 +14,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+try:
+ from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+except ImportError:
+ from http.server import BaseHTTPRequestHandler, HTTPServer
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/plain')
self.end_headers()
- self.wfile.write('Hello GCP dev!')
+ self.wfile.write(b'Hello GCP dev!')
return
|
b914fee46220633c81f244e388c9385614db7d60 | karspexet/ticket/tasks.py | karspexet/ticket/tasks.py | import logging
from django.conf import settings
from django.core.mail import send_mail
from django.contrib.sites.models import Site
from django.template.loader import render_to_string
from django.template import Context
logger = logging.getLogger(__file__)
def send_ticket_email_to_customer(reservation, email, name=None):
'''Send an email to the customer with a link to their tickets
If the supplied email is empty, this will silently fail. The reason for this is that this is used in the payment
flow, and if we raise an error here, it will crash the payment transaction, and at that point we have likely
charged someone's card without giving them tickets.
Therefore the trade-off is made that if the customer fails to provide a valid email address, they will not receive
an email. They will however, have another chance to send the reservation information via email at the
reservation-detail page.
'''
if not email:
return
if not name:
name = email
to_address = f'{name} <{email}>'
subject = 'Dina biljetter till Kårspexet'
site = Site.objects.get_current()
reservation_url = f'https://{site.domain}{reservation.get_absolute_url()}'
body = render_to_string('reservation_email.txt', Context({
'reservation': reservation,
'url': reservation_url,
}))
send_mail(
subject,
body,
settings.TICKET_EMAIL_FROM_ADDRESS,
[to_address],
fail_silently=False,
)
| import logging
from django.conf import settings
from django.core.mail import send_mail
from django.contrib.sites.models import Site
from django.template.loader import render_to_string
logger = logging.getLogger(__file__)
def send_ticket_email_to_customer(reservation, email, name=None):
'''Send an email to the customer with a link to their tickets
If the supplied email is empty, this will silently fail. The reason for this is that this is used in the payment
flow, and if we raise an error here, it will crash the payment transaction, and at that point we have likely
charged someone's card without giving them tickets.
Therefore the trade-off is made that if the customer fails to provide a valid email address, they will not receive
an email. They will however, have another chance to send the reservation information via email at the
reservation-detail page.
'''
if not email:
return
if not name:
name = email
to_address = f'{name} <{email}>'
subject = 'Dina biljetter till Kårspexet'
site = Site.objects.get_current()
reservation_url = f'https://{site.domain}{reservation.get_absolute_url()}'
body = render_to_string('reservation_email.txt', {
'reservation': reservation,
'url': reservation_url,
})
send_mail(
subject,
body,
settings.TICKET_EMAIL_FROM_ADDRESS,
[to_address],
fail_silently=False,
)
| Use dict in render_to_string when sending email | Use dict in render_to_string when sending email
Building a Context manually like this is deprecated in Django 1.11, so
let's not do it this way.
| Python | mit | Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet | ---
+++
@@ -3,7 +3,6 @@
from django.core.mail import send_mail
from django.contrib.sites.models import Site
from django.template.loader import render_to_string
-from django.template import Context
logger = logging.getLogger(__file__)
@@ -29,10 +28,10 @@
subject = 'Dina biljetter till Kårspexet'
site = Site.objects.get_current()
reservation_url = f'https://{site.domain}{reservation.get_absolute_url()}'
- body = render_to_string('reservation_email.txt', Context({
+ body = render_to_string('reservation_email.txt', {
'reservation': reservation,
'url': reservation_url,
- }))
+ })
send_mail(
subject, |
2cbbf3d2175f35370ca4e9a0a8d9e6f01f3a2240 | python_plot/plot_test.py | python_plot/plot_test.py | from matplotlib import pyplot as plt
import numpy as np
import os
import pandas as pd
data_files = "./data"
def movingAv(interval, window_size):
    """Smooth *interval* with a centred moving average of *window_size* points.

    The 'same' convolution mode keeps the output the same length as the
    input; values near the edges are averaged over a truncated window.
    """
    width = int(window_size)
    kernel = np.full(width, 1.0) / float(window_size)
    return np.convolve(interval, kernel, 'same')
# Plot every CSV under data_files: the smoothed curve first, then the raw data.
fig, ax = plt.subplots()
for subdir, dirs, files in os.walk(data_files):
    for fname in files:
        if fname.endswith('.csv'):
            # Join with the directory being walked so CSVs in nested
            # sub-folders resolve correctly; the hard-coded "data/%s"
            # only worked for files sitting directly in ./data.
            csv_path = os.path.join(subdir, fname)
            x, y = np.loadtxt(csv_path, unpack=True, delimiter=',', skiprows=1)
            y_av = movingAv(y, 200)
            ax.plot(x, y_av)  # smoothed
            ax.plot(x, y)     # raw
plt.show()
| from matplotlib import pyplot as plt
import numpy as np
import os
# import pandas as pd
data_files = "./data"
def movingAv(interval, window_size):
    """Return a centred moving average of *interval* using *window_size* taps.

    Convolving in 'same' mode preserves the input length, so edge samples
    are computed from a partially overlapping window.
    """
    size = int(window_size)
    weights = np.repeat(1.0 / float(window_size), size)
    return np.convolve(interval, weights, 'same')
fig, ax = plt.subplots()
# Walk the data directory and plot each CSV twice: smoothed then raw.
for subdir, dirs, files in os.walk(data_files):
    for fname in files:
        if fname.endswith('.csv'):
            # Build the path from the directory os.walk is visiting;
            # the previous hard-coded "data/%s" broke for anything
            # below the top level of the data directory.
            x, y = np.loadtxt(os.path.join(subdir, fname), unpack=True,
                              delimiter=',', skiprows=1)
            y_av = movingAv(y, 200)
            ax.plot(x, y_av)  # smoothed curve
            ax.plot(x, y)     # raw samples
plt.show()
| Comment out pandas because it's unused | Comment out pandas because it's unused
| Python | mit | agurusa/practice_code,agurusa/practice_code | ---
+++
@@ -1,7 +1,7 @@
from matplotlib import pyplot as plt
import numpy as np
import os
-import pandas as pd
+# import pandas as pd
data_files = "./data"
|
f2db056d4da23b96034f7c3ac5c4c12dd2853e91 | luigi_slack/slack_api.py | luigi_slack/slack_api.py | import json
from slackclient import SlackClient
class SlackBotConf(object):
    """Configuration for the bot identity used when posting to Slack."""

    def __init__(self):
        # Display name attached to every message the bot posts.
        self.username = 'Luigi-slack Bot'
class SlackAPI(object):
    """Minimal wrapper around SlackClient: channel lookup and bulk posting."""

    def __init__(self, token, bot_conf=None):
        """Connect with *token* and eagerly cache the workspace channel list.

        bot_conf: optional SlackBotConf; a fresh one is created when omitted.
        (A ``SlackBotConf()`` default argument would be evaluated once at
        class-definition time and shared by every instance.)
        """
        self.client = SlackClient(token)
        self._all_channels = self._get_channels()
        self.bot = bot_conf if bot_conf is not None else SlackBotConf()

    def _get_channels(self):
        """Fetch all channels; raise with a helpful hint when that fails."""
        res = self.client.api_call('channels.list')
        payload = json.loads(res.decode())
        channels = payload.get('channels')
        if channels is None:
            # A response without a 'channels' key usually means the API
            # rejected the token, so surface that instead of a bare KeyError.
            raise Exception("Could not get Slack channels. Are you sure your token is correct?")
        return channels

    def get_channels(self, reload_channels=False):
        """Return the cached channel list, refreshing it on demand."""
        if not self._all_channels or reload_channels:
            self._all_channels = self._get_channels()
        return self._all_channels

    def channel_name_to_id(self, names):
        """Map channel *names* to [{'name': ..., 'id': ...}] entries.

        Unknown names are silently skipped, preserving the original behavior.
        """
        name_to_id = []
        for name in names:
            for channel in self._all_channels:
                if channel['name'] == name:
                    name_to_id.append({'name': channel['name'], 'id': channel['id']})
        return name_to_id

    def bulk_message(self, message, post_to=()):
        """Post *message* to every channel named in *post_to*; returns True.

        The default is an immutable tuple (was a mutable list) so the
        shared default can never be mutated across calls.
        """
        channel_map = self.channel_name_to_id(post_to)
        for channel in channel_map:
            self.client.api_call('chat.postMessage',
                                 text=message,
                                 channel=channel['id'],
                                 username=self.bot.username)
        return True
| import json
from slackclient import SlackClient
class SlackBotConf(object):
    """Settings describing how the luigi-slack bot appears in Slack."""

    def __init__(self):
        # Username shown as the author of posted messages.
        self.username = 'Luigi-slack Bot'
class SlackAPI(object):
    """Small facade over SlackClient for listing channels and posting messages."""

    def __init__(self, token, bot_conf=SlackBotConf()):
        # Authenticate and eagerly cache the workspace channel list.
        self.client = SlackClient(token)
        self._all_channels = self._get_channels()
        self.bot = bot_conf

    def _get_channels(self):
        """Download the channel list, failing loudly on a bad token."""
        raw = self.client.api_call('channels.list')
        payload = json.loads(raw.decode())
        channels = payload.get('channels', None)
        if channels is None:
            raise Exception("Could not get Slack channels. Are you sure your token is correct?")
        return channels

    def get_channels(self, reload_channels=False):
        """Return cached channels, refetching when empty or when forced."""
        if reload_channels or not self._all_channels:
            self._all_channels = self._get_channels()
        return self._all_channels

    def channel_name_to_id(self, names):
        """Resolve each known channel name to a {'name': ..., 'id': ...} dict."""
        return [
            {'name': channel['name'], 'id': channel['id']}
            for name in names
            for channel in self._all_channels
            if channel['name'] == name
        ]

    def bulk_message(self, message, post_to=[]):
        """Send *message* to every named channel; always returns True."""
        for channel in self.channel_name_to_id(post_to):
            self.client.api_call('chat.postMessage',
                                 text=message,
                                 channel=channel['id'],
                                 username=self.bot.username)
        return True
| Validate token when fetching channels | Validate token when fetching channels
| Python | mit | bonzanini/luigi-slack | ---
+++
@@ -15,7 +15,10 @@
def _get_channels(self):
res = self.client.api_call('channels.list')
_channels = json.loads(res.decode())
- return _channels['channels']
+ _parsed_channels = _channels.get('channels', None)
+ if _parsed_channels is None:
+ raise Exception("Could not get Slack channels. Are you sure your token is correct?")
+ return _parsed_channels
def get_channels(self, reload_channels=False):
if not self._all_channels or reload_channels: |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.