commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
4932279ce1b43162c8c37cabd4e62e0620f4e3ce
|
python/marvin/core/exceptions.py
|
python/marvin/core/exceptions.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
exceptions.py
Licensed under a 3-clause BSD license.
Revision history:
13 Feb 2016 J. Sánchez-Gallego
Initial version
"""
from __future__ import division
from __future__ import print_function
__all__ = ['MarvinError', 'MarvinUserWarning', 'MarvinSkippedTestWargning',
'MarvinNotImplemented', 'MarvinMissingDependency']
class MarvinError(Exception):
pass
class MarvinNotImplemented(MarvinError):
"""A Marvin exception for not yet implemented features."""
def __init__(self, message=None):
message = 'This feature is not implemented yet.' \
if not message else message
super(MarvinNotImplemented, self).__init__(message)
class MarvinMissingDependency(MarvinError):
"""A custom exception for missing dependencies."""
pass
class MarvinWarning(Warning):
"""Base warning for Marvin."""
pass
class MarvinUserWarning(UserWarning, MarvinWarning):
"""The primary warning class."""
pass
class MarvinSkippedTestWarning(MarvinUserWarning):
"""A warning for when a test is skipped."""
pass
|
#!/usr/bin/env python
# encoding: utf-8
"""
exceptions.py
Licensed under a 3-clause BSD license.
Revision history:
13 Feb 2016 J. Sánchez-Gallego
Initial version
"""
from __future__ import division
from __future__ import print_function
__all__ = ['MarvinError', 'MarvinUserWarning', 'MarvinSkippedTestWarning',
'MarvinNotImplemented', 'MarvinMissingDependency']
class MarvinError(Exception):
pass
class MarvinNotImplemented(MarvinError):
"""A Marvin exception for not yet implemented features."""
def __init__(self, message=None):
message = 'This feature is not implemented yet.' \
if not message else message
super(MarvinNotImplemented, self).__init__(message)
class MarvinMissingDependency(MarvinError):
"""A custom exception for missing dependencies."""
pass
class MarvinWarning(Warning):
"""Base warning for Marvin."""
pass
class MarvinUserWarning(UserWarning, MarvinWarning):
"""The primary warning class."""
pass
class MarvinSkippedTestWarning(MarvinUserWarning):
"""A warning for when a test is skipped."""
pass
|
Fix another occurence of same typo
|
Fix another occurence of same typo
|
Python
|
bsd-3-clause
|
albireox/marvin,bretthandrews/marvin,albireox/marvin,sdss/marvin,albireox/marvin,sdss/marvin,sdss/marvin,albireox/marvin,bretthandrews/marvin,bretthandrews/marvin,bretthandrews/marvin,sdss/marvin
|
---
+++
@@ -16,7 +16,7 @@
from __future__ import print_function
-__all__ = ['MarvinError', 'MarvinUserWarning', 'MarvinSkippedTestWargning',
+__all__ = ['MarvinError', 'MarvinUserWarning', 'MarvinSkippedTestWarning',
'MarvinNotImplemented', 'MarvinMissingDependency']
|
4db9c0139b42cdaf7c22ac2032b5edfec9394ce4
|
mopidy/frontends/mpd/protocol/connection.py
|
mopidy/frontends/mpd/protocol/connection.py
|
from mopidy import settings
from mopidy.frontends.mpd.protocol import handle_pattern
from mopidy.frontends.mpd.exceptions import MpdPasswordError
@handle_pattern(r'^close$')
def close(context):
"""
*musicpd.org, connection section:*
``close``
Closes the connection to MPD.
"""
context.session.close()
@handle_pattern(r'^kill$')
def kill(context):
"""
*musicpd.org, connection section:*
``kill``
Kills MPD.
"""
pass # TODO
@handle_pattern(r'^password "(?P<password>[^"]+)"$')
def password_(context, password):
"""
*musicpd.org, connection section:*
``password {PASSWORD}``
This is used for authentication with the server. ``PASSWORD`` is
simply the plaintext password.
"""
# You will not get to this code without being authenticated. This is for
# when you are already authenticated, and are sending additional 'password'
# requests.
if settings.MPD_SERVER_PASSWORD != password:
raise MpdPasswordError(u'incorrect password', command=u'password')
@handle_pattern(r'^ping$')
def ping(context):
"""
*musicpd.org, connection section:*
``ping``
Does nothing but return ``OK``.
"""
pass
|
from mopidy import settings
from mopidy.frontends.mpd.protocol import handle_pattern
from mopidy.frontends.mpd.exceptions import MpdPasswordError
@handle_pattern(r'^close$')
def close(context):
"""
*musicpd.org, connection section:*
``close``
Closes the connection to MPD.
"""
context.session.close()
@handle_pattern(r'^kill$')
def kill(context):
"""
*musicpd.org, connection section:*
``kill``
Kills MPD.
"""
# TODO We do not want to allow remote killing of Mopidy. We should throw an
# MPD exception here. Maybe using ACK_ERROR_PERMISSION.
pass
@handle_pattern(r'^password "(?P<password>[^"]+)"$')
def password_(context, password):
"""
*musicpd.org, connection section:*
``password {PASSWORD}``
This is used for authentication with the server. ``PASSWORD`` is
simply the plaintext password.
"""
# You will not get to this code without being authenticated. This is for
# when you are already authenticated, and are sending additional 'password'
# requests.
if settings.MPD_SERVER_PASSWORD != password:
raise MpdPasswordError(u'incorrect password', command=u'password')
@handle_pattern(r'^ping$')
def ping(context):
"""
*musicpd.org, connection section:*
``ping``
Does nothing but return ``OK``.
"""
pass
|
Update TODO for MPD command 'kill'
|
Update TODO for MPD command 'kill'
|
Python
|
apache-2.0
|
dbrgn/mopidy,diandiankan/mopidy,rawdlite/mopidy,adamcik/mopidy,vrs01/mopidy,tkem/mopidy,ali/mopidy,ZenithDK/mopidy,vrs01/mopidy,pacificIT/mopidy,hkariti/mopidy,kingosticks/mopidy,bacontext/mopidy,glogiotatidis/mopidy,jodal/mopidy,kingosticks/mopidy,swak/mopidy,quartz55/mopidy,quartz55/mopidy,mopidy/mopidy,swak/mopidy,ZenithDK/mopidy,jcass77/mopidy,dbrgn/mopidy,mokieyue/mopidy,rawdlite/mopidy,dbrgn/mopidy,quartz55/mopidy,jcass77/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,rawdlite/mopidy,diandiankan/mopidy,woutervanwijk/mopidy,jodal/mopidy,abarisain/mopidy,bacontext/mopidy,jcass77/mopidy,liamw9534/mopidy,priestd09/mopidy,mokieyue/mopidy,ZenithDK/mopidy,bacontext/mopidy,adamcik/mopidy,diandiankan/mopidy,swak/mopidy,liamw9534/mopidy,bacontext/mopidy,bencevans/mopidy,dbrgn/mopidy,pacificIT/mopidy,ali/mopidy,pacificIT/mopidy,hkariti/mopidy,SuperStarPL/mopidy,bencevans/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,abarisain/mopidy,jmarsik/mopidy,quartz55/mopidy,swak/mopidy,tkem/mopidy,bencevans/mopidy,vrs01/mopidy,bencevans/mopidy,jmarsik/mopidy,mopidy/mopidy,adamcik/mopidy,jmarsik/mopidy,vrs01/mopidy,mokieyue/mopidy,mopidy/mopidy,priestd09/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,rawdlite/mopidy,jmarsik/mopidy,ZenithDK/mopidy,tkem/mopidy,SuperStarPL/mopidy,woutervanwijk/mopidy,jodal/mopidy,ali/mopidy,hkariti/mopidy,mokieyue/mopidy,hkariti/mopidy,ali/mopidy,glogiotatidis/mopidy,priestd09/mopidy,tkem/mopidy,diandiankan/mopidy
|
---
+++
@@ -22,7 +22,9 @@
Kills MPD.
"""
- pass # TODO
+ # TODO We do not want to allow remote killing of Mopidy. We should throw an
+ # MPD exception here. Maybe using ACK_ERROR_PERMISSION.
+ pass
@handle_pattern(r'^password "(?P<password>[^"]+)"$')
def password_(context, password):
|
e8fb50e265d62086fa2501f55e7763e49b775440
|
scripts/generate-s3-post-url-data.py
|
scripts/generate-s3-post-url-data.py
|
#!/usr/bin/env python
"""
This script will create a presigned url and fields for POSTING to an s3 bucket. This allows someone without permissions
on the bucket to upload a file.
This script must be run by an entity with the right permissions on the bucket.
The url will expire after 600 seconds.
Usage:
scripts/generate-s3-post-url-data.py <bucket> <filename>
"""
import json
import boto3
from docopt import docopt
def generate_s3_post_data(bucket, filename):
s3 = boto3.client('s3')
fields = {"acl": "private"}
conditions = [
{"acl": "private"}
]
post = s3.generate_presigned_post(
Bucket=bucket,
Key=filename,
Fields=fields,
Conditions=conditions,
ExpiresIn=600
)
return json.dumps(post)
if __name__ == "__main__":
arguments = docopt(__doc__)
bucket = arguments['<bucket>']
filename = arguments['<filename>']
print(generate_s3_post_data(bucket, filename))
|
#!/usr/bin/env python
"""
This script will create a presigned url and fields for POSTING to an s3 bucket. This allows someone without permissions
on the bucket to upload a file.
This script must be run by an entity with the right permissions on the bucket.
The url will expire after 600 seconds.
Usage:
scripts/generate-s3-post-url-data.py <bucket> <filename>
"""
import json
import boto3
from docopt import docopt
def generate_s3_post_data(bucket, filename):
s3 = boto3.client('s3')
fields = {"acl": "bucket-owner-read"}
conditions = [
{"acl": "bucket-owner-read"}
]
post = s3.generate_presigned_post(
Bucket=bucket,
Key=filename,
Fields=fields,
Conditions=conditions,
ExpiresIn=600
)
return json.dumps(post)
if __name__ == "__main__":
arguments = docopt(__doc__)
bucket = arguments['<bucket>']
filename = arguments['<filename>']
print(generate_s3_post_data(bucket, filename))
|
Apply bucket-owner-read acl to uploaded db dumps
|
Apply bucket-owner-read acl to uploaded db dumps
As per https://docs.aws.amazon.com/AmazonS3/latest/dev/crr-troubleshoot.html
> By default, the bucket owner does not have any permissions on the objects created by <another> account. And the replication configuration replicates only the objects for which the bucket owner has access permissions.
Therefore the bucket owner must be granted read permissions on the uploaded object for replicatino to work.
We do this by applying the `bucket-owner-read` canned acl detailed here:
https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl
|
Python
|
mit
|
alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws
|
---
+++
@@ -19,9 +19,9 @@
def generate_s3_post_data(bucket, filename):
s3 = boto3.client('s3')
- fields = {"acl": "private"}
+ fields = {"acl": "bucket-owner-read"}
conditions = [
- {"acl": "private"}
+ {"acl": "bucket-owner-read"}
]
post = s3.generate_presigned_post(
|
5d57c43ba7a01dc0f94ab41e4014484d1b78c1cb
|
django_polymorphic_auth/admin.py
|
django_polymorphic_auth/admin.py
|
from django.conf import settings
from django.contrib import admin
from django_polymorphic_auth.models import User
from django_polymorphic_auth.usertypes.email.models import EmailUser
from django_polymorphic_auth.usertypes.username.models import UsernameUser
from polymorphic.admin import \
PolymorphicParentModelAdmin, PolymorphicChildModelAdmin
class UserChildAdmin(PolymorphicChildModelAdmin):
base_model = User
# base_form = forms.ProductAdminForm
class UserAdmin(PolymorphicParentModelAdmin):
base_model = User
list_filter = ('is_active', 'is_staff', 'is_superuser', 'created')
list_display = (
'__unicode__', 'is_active', 'is_staff', 'is_superuser', 'created')
polymorphic_list = True
def get_child_models(self):
from django_polymorphic_auth.usertypes.email.admin import \
EmailUserAdmin
from django_polymorphic_auth.usertypes.username.admin import \
UsernameUserAdmin
child_models = []
if 'django_polymorphic_auth.usertypes.email' in \
settings.INSTALLED_APPS:
child_models.append((EmailUser, EmailUserAdmin))
if 'django_polymorphic_auth.usertypes.username' in \
settings.INSTALLED_APPS:
child_models.append((UsernameUser, UsernameUserAdmin))
return child_models
admin.site.register(User, UserAdmin)
|
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.forms import UserChangeForm
from django_polymorphic_auth.models import User
from django_polymorphic_auth.usertypes.email.models import EmailUser
from django_polymorphic_auth.usertypes.username.models import UsernameUser
from polymorphic.admin import \
PolymorphicParentModelAdmin, PolymorphicChildModelAdmin
class UserChildAdmin(PolymorphicChildModelAdmin):
base_fieldsets = (
('Meta', {
'classes': ('collapse', ),
'fields': ('last_login', )
}),
('Permissions', {
'fields': (
'is_active', 'is_staff', 'is_superuser', 'groups',
'user_permissions')
}),
)
base_form = UserChangeForm
base_model = User
class UserAdmin(PolymorphicParentModelAdmin):
base_model = User
list_filter = ('is_active', 'is_staff', 'is_superuser', 'created')
list_display = (
'__unicode__', 'is_active', 'is_staff', 'is_superuser', 'created')
polymorphic_list = True
def get_child_models(self):
from django_polymorphic_auth.usertypes.email.admin import \
EmailUserAdmin
from django_polymorphic_auth.usertypes.username.admin import \
UsernameUserAdmin
child_models = []
if 'django_polymorphic_auth.usertypes.email' in \
settings.INSTALLED_APPS:
child_models.append((EmailUser, EmailUserAdmin))
if 'django_polymorphic_auth.usertypes.username' in \
settings.INSTALLED_APPS:
child_models.append((UsernameUser, UsernameUserAdmin))
return child_models
admin.site.register(User, UserAdmin)
|
Integrate `UserChangeForm` so we get nice password fields.
|
Integrate `UserChangeForm` so we get nice password fields.
|
Python
|
mit
|
whembed197923/django-polymorphic-auth,ixc/django-polymorphic-auth
|
---
+++
@@ -1,5 +1,6 @@
from django.conf import settings
from django.contrib import admin
+from django.contrib.auth.forms import UserChangeForm
from django_polymorphic_auth.models import User
from django_polymorphic_auth.usertypes.email.models import EmailUser
from django_polymorphic_auth.usertypes.username.models import UsernameUser
@@ -8,8 +9,19 @@
class UserChildAdmin(PolymorphicChildModelAdmin):
+ base_fieldsets = (
+ ('Meta', {
+ 'classes': ('collapse', ),
+ 'fields': ('last_login', )
+ }),
+ ('Permissions', {
+ 'fields': (
+ 'is_active', 'is_staff', 'is_superuser', 'groups',
+ 'user_permissions')
+ }),
+ )
+ base_form = UserChangeForm
base_model = User
- # base_form = forms.ProductAdminForm
class UserAdmin(PolymorphicParentModelAdmin):
|
998da5c8d68dff5ad612847a2d16fb6464e30bc2
|
semillas_backend/users/models.py
|
semillas_backend/users/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from django.contrib.auth.models import AbstractUser
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.db.models import PointField
@python_2_unicode_compatible
class User(AbstractUser):
# First Name and Last Name do not cover name patterns
# around the globe.
name = models.CharField(_('Name of User'), blank=True, max_length=255)
location = PointField(
null=True,
blank=True,
help_text='User Location, only read in production user admin panel'
)
picture = models.ImageField(
null=True,
blank=True,
help_text='Profile Picture'
)
def __str__(self):
return self.username
def get_absolute_url(self):
return reverse('users:detail', kwargs={'username': self.username})
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from django.contrib.auth.models import AbstractUser
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.db.models import PointField
from .storage import user_store
@python_2_unicode_compatible
class User(AbstractUser):
# First Name and Last Name do not cover name patterns
# around the globe.
name = models.CharField(_('Name of User'), blank=True, max_length=255)
location = PointField(
null=True,
blank=True,
help_text='User Location, only read in production user admin panel'
)
picture = models.ImageField(
null=True,
blank=True,
help_text='Profile Picture',
storage=user_store
)
def __str__(self):
return self.username
def get_absolute_url(self):
return reverse('users:detail', kwargs={'username': self.username})
|
Test for uploading files to /media/ folder in S3
|
Test for uploading files to /media/ folder in S3
|
Python
|
mit
|
Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform
|
---
+++
@@ -7,6 +7,8 @@
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.db.models import PointField
+
+from .storage import user_store
@python_2_unicode_compatible
@@ -24,7 +26,8 @@
picture = models.ImageField(
null=True,
blank=True,
- help_text='Profile Picture'
+ help_text='Profile Picture',
+ storage=user_store
)
def __str__(self):
|
bcf7d7eb689ec4e926a659bce3bf8301c23bd7e0
|
test/utu_test.py
|
test/utu_test.py
|
import utu
import unittest
def run_test(test):
runner = unittest.runner.TextTestRunner()
loader = unittest.loader.defaultTestLoader
test = loader.loadTestsFromTestCase(test)
runner.run(test)
class UtuTest(unittest.TestCase):
def test_sanity(self):
# XXX should work out why I cannot use an ordinary variable here
state = {}
class Testee(unittest.TestCase):
def test_method(self):
state['test_ran'] = True
run_test(Testee)
self.assertTrue('test_ran' in state)
def test_basic(self):
state = {}
class Testee(utu.adjusted_unittest_base()):
def setup(self):
state['setup_ran'] = True
def teardown(self):
state['teardown_ran'] = True
def test_foo(self):
self.assert_true(True)
state['asserted'] = True
run_test(Testee)
self.assertTrue('setup_ran' in state)
self.assertTrue('teardown_ran' in state)
self.assertTrue('asserted' in state)
if __name__ == '__main__':
unittest.main()
|
import utu
import unittest
def invoke(test):
runner = unittest.runner.TextTestRunner()
loader = unittest.loader.defaultTestLoader
test = loader.loadTestsFromTestCase(test)
runner.run(test)
class UtuTest(unittest.TestCase):
def test_sanity(self):
# XXX should work out why I cannot use an ordinary variable here
state = {}
class Testee(unittest.TestCase):
def test_method(self):
state['test_ran'] = True
invoke(Testee)
self.assertTrue('test_ran' in state)
def test_basic(self):
state = {}
class Testee(utu.adjusted_unittest_base()):
def setup(self):
state['setup_ran'] = True
def teardown(self):
state['teardown_ran'] = True
def test_foo(self):
self.assert_true(True)
state['asserted'] = True
invoke(Testee)
self.assertTrue('setup_ran' in state)
self.assertTrue('teardown_ran' in state)
self.assertTrue('asserted' in state)
if __name__ == '__main__':
unittest.main()
|
Rename run_test to invoke to avoid confusing nosetests
|
Rename run_test to invoke to avoid confusing nosetests
|
Python
|
bsd-2-clause
|
p/utu
|
---
+++
@@ -1,7 +1,7 @@
import utu
import unittest
-def run_test(test):
+def invoke(test):
runner = unittest.runner.TextTestRunner()
loader = unittest.loader.defaultTestLoader
test = loader.loadTestsFromTestCase(test)
@@ -16,7 +16,7 @@
def test_method(self):
state['test_ran'] = True
- run_test(Testee)
+ invoke(Testee)
self.assertTrue('test_ran' in state)
@@ -34,7 +34,7 @@
self.assert_true(True)
state['asserted'] = True
- run_test(Testee)
+ invoke(Testee)
self.assertTrue('setup_ran' in state)
self.assertTrue('teardown_ran' in state)
|
be093e7df91ca68e4e9c73e37d18042cc5029b87
|
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
|
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
|
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1])
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
engine = create_engine('postgresql://postgresql.service.consul:5432/germline_genotype_tracking')
try:
df.to_sql("pcawg_samples", engine)
except ValueError as e:
if str(e) != "Table 'pcawg_samples' already exists.":
print str(e)
exit(1)
else:
print str(e)
|
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1])
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
engine = create_engine('postgresql://pcawg_admin:pcawg@postgresql.service.consul:5432/germline_genotype_tracking')
try:
df.to_sql("pcawg_samples", engine)
except ValueError as e:
if str(e) != "Table 'pcawg_samples' already exists.":
print str(e)
exit(1)
else:
print str(e)
|
Add DB credentials to sqlalchemy connection.
|
Add DB credentials to sqlalchemy connection.
|
Python
|
mit
|
llevar/germline-regenotyper,llevar/germline-regenotyper
|
---
+++
@@ -6,7 +6,7 @@
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
-engine = create_engine('postgresql://postgresql.service.consul:5432/germline_genotype_tracking')
+engine = create_engine('postgresql://pcawg_admin:pcawg@postgresql.service.consul:5432/germline_genotype_tracking')
try:
df.to_sql("pcawg_samples", engine)
|
6194e5b9cb1a07f96e1844155863fe622f11bb91
|
ella/newman/context_processors.py
|
ella/newman/context_processors.py
|
from django.conf import settings
def newman_media(request):
"""
Inject NEWMAN_MEDIA_URL to template. Use NEWMAN_MEDIA_PREFIX value from
settings, if not available, use MEDIA_URL + 'newman_media/' combination
"""
uri = getattr(settings, 'NEWMAN_MEDIA_URL', None)
if not uri:
uri = getattr(settings, 'MEDIA_URL') + 'newman_media/'
return {
'NEWMAN_MEDIA_URL' : uri
}
|
from django.conf import settings
def newman_media(request):
"""
Inject NEWMAN_MEDIA_URL to template. Use NEWMAN_MEDIA_PREFIX value from
settings, if not available, use MEDIA_URL + 'newman_media/' combination
"""
uri = getattr(settings, 'NEWMAN_MEDIA_PREFIX', None)
if not uri:
uri = getattr(settings, 'MEDIA_URL') + 'newman_media/'
return {
'NEWMAN_MEDIA_URL' : uri
}
|
Make code consistent with comments
|
Make code consistent with comments
|
Python
|
bsd-3-clause
|
WhiskeyMedia/ella,petrlosa/ella,whalerock/ella,MichalMaM/ella,whalerock/ella,ella/ella,whalerock/ella,petrlosa/ella,MichalMaM/ella,WhiskeyMedia/ella
|
---
+++
@@ -5,7 +5,7 @@
Inject NEWMAN_MEDIA_URL to template. Use NEWMAN_MEDIA_PREFIX value from
settings, if not available, use MEDIA_URL + 'newman_media/' combination
"""
- uri = getattr(settings, 'NEWMAN_MEDIA_URL', None)
+ uri = getattr(settings, 'NEWMAN_MEDIA_PREFIX', None)
if not uri:
uri = getattr(settings, 'MEDIA_URL') + 'newman_media/'
return {
|
4f183ee9c24814cdb136f39effb9d932058650d5
|
generate_resume.py
|
generate_resume.py
|
import argparse
import jinja2
import os
import types
import yaml
from debug.debug import dprint
def load_templates(template_dir=os.path.join(os.getcwd(), 'template')):
loader = jinja2.FileSystemLoader(template_dir)
environment = jinja2.environment.Environment(loader=loader)
return environment
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Generates multiple resume outputs from a singular YAML-formatted source")
parser.add_argument('source_file', help="yaml-formatted containing the desired resume sections")
args = parser.parse_args()
environment = load_templates()
dprint("found templates {}".format(environment.list_templates()))
with open('resume.txt', 'w') as output:
with open(args.source_file, 'r') as source_file:
raw = yaml.load(source_file)
for section, value in zip(raw.keys(), raw.values()):
try:
# Wow this is gross. refactor a _ton_
template = environment.get_template(os.path.join('plain_text',section+os.path.extsep+'txt'))
dprint("found template {}".format(template))
metatype = types.new_class(section)
dprint("created metatype {}".format(metatype))
metaobject = metatype()
metaobject.__dict__ = value
dprint("metaobject = {}".format(str(metaobject)))
dprint("metaobject = {}".format(metaobject.__dict__))
metaargs = dict()
metaargs[section] = metaobject
output.write(template.render(metaargs))
except (jinja2.TemplateNotFound):
print("Source section '{}' found in source file, but no template exists".format(section))
|
import argparse
import jinja2
import os
import yaml
from debug.debug import dprint
def load_templates(template_dir=os.path.join(os.getcwd(), 'template')):
loader = jinja2.FileSystemLoader(template_dir)
environment = jinja2.environment.Environment(loader=loader)
return environment
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Generates multiple resume outputs from a singular YAML-formatted source")
parser.add_argument('source_file', help="yaml-formatted containing the desired resume sections")
args = parser.parse_args()
environment = load_templates()
dprint("found templates {}".format(environment.list_templates()))
with open('resume.txt', 'w') as output:
with open(args.source_file, 'r') as source_file:
raw = yaml.load(source_file)
for section, value in zip(raw.keys(), raw.values()):
try:
# Wow this is gross. refactor a _ton_
template = environment.get_template(os.path.join('plain_text',section+os.path.extsep+'txt'))
dprint("found template {}".format(template))
metaargs = dict()
metaargs[section] = value
output.write(template.render(metaargs))
except (jinja2.TemplateNotFound):
print("Source section '{}' found in source file, but no template exists".format(section))
|
Remove a ton of metaclass complexity
|
Remove a ton of metaclass complexity
If I'd read the jinja2 documentation, I'd have seen that dotted notation
is equivalent to subscript notation, which means metaclasses and
generated classes aren't necessary. At all.
Note-to-self for the future: RTFM.
|
Python
|
mit
|
sjbarag/poorly-named-resume-generator,sjbarag/poorly-named-resume-generator
|
---
+++
@@ -1,7 +1,6 @@
import argparse
import jinja2
import os
-import types
import yaml
from debug.debug import dprint
@@ -30,15 +29,8 @@
template = environment.get_template(os.path.join('plain_text',section+os.path.extsep+'txt'))
dprint("found template {}".format(template))
- metatype = types.new_class(section)
- dprint("created metatype {}".format(metatype))
- metaobject = metatype()
- metaobject.__dict__ = value
- dprint("metaobject = {}".format(str(metaobject)))
- dprint("metaobject = {}".format(metaobject.__dict__))
-
metaargs = dict()
- metaargs[section] = metaobject
+ metaargs[section] = value
output.write(template.render(metaargs))
except (jinja2.TemplateNotFound):
print("Source section '{}' found in source file, but no template exists".format(section))
|
f313c9c476f6ae441f65567552ed835e96c62cb3
|
avocado/tests/settings.py
|
avocado/tests/settings.py
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
MODELTREES = {
'default': {
'model': 'tests.Employee'
}
}
INSTALLED_APPS = (
'avocado',
'avocado.meta',
'avocado.tests',
)
COVERAGE_MODULES = (
'avocado.meta.formatters',
'avocado.meta.exporters._base',
'avocado.meta.exporters._csv',
'avocado.meta.exporters._excel',
'avocado.meta.exporters._sas',
'avocado.meta.exporters._r',
# 'avocado.meta.logictree',
'avocado.meta.managers',
'avocado.meta.mixins',
'avocado.meta.models',
'avocado.meta.operators',
'avocado.meta.translators',
'avocado.meta.utils',
)
TEST_RUNNER = 'avocado.tests.coverage_test.CoverageTestRunner'
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
MODELTREES = {
'default': {
'model': 'tests.Employee'
}
}
INSTALLED_APPS = (
'avocado',
'avocado.meta',
'avocado.tests',
)
COVERAGE_MODULES = (
'avocado.meta.formatters',
'avocado.meta.exporters._base',
'avocado.meta.exporters._csv',
'avocado.meta.exporters._excel',
'avocado.meta.exporters._sas',
'avocado.meta.exporters._r',
'avocado.meta.exporters._json',
# 'avocado.meta.logictree',
'avocado.meta.managers',
'avocado.meta.mixins',
'avocado.meta.models',
'avocado.meta.operators',
'avocado.meta.translators',
'avocado.meta.utils',
)
TEST_RUNNER = 'avocado.tests.coverage_test.CoverageTestRunner'
|
Add json exporter module to modules coveraged
|
Add json exporter module to modules coveraged
|
Python
|
bsd-2-clause
|
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
|
---
+++
@@ -23,6 +23,7 @@
'avocado.meta.exporters._excel',
'avocado.meta.exporters._sas',
'avocado.meta.exporters._r',
+ 'avocado.meta.exporters._json',
# 'avocado.meta.logictree',
'avocado.meta.managers',
'avocado.meta.mixins',
|
51b362ab66ed8a1a793dc9995a6f06067230085b
|
geomdl/__init__.py
|
geomdl/__init__.py
|
""" This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluation these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are;
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
# Fixes "from geomdl import *" but this is not considered as a good practice
# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
__all__ = ["BSpline.Curve",
"BSpline.Curve2D",
"BSpline.Surface",
"NURBS.Curve",
"NURBS.Curve2D",
"NURBS.Surface",
"CPGen.Grid",
"CPGen.GridWeighted",
"utilities"]
|
""" This package contains native Python implementations of several `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ algorithms for generating B-spline / NURBS curves and surfaces. It also provides a data structure for storing elements required for evaluation these curves and surfaces.
Please follow the `README.md <https://github.com/orbingol/NURBS-Python/blob/master/README.md>`_ file included in the `repository <https://github.com/orbingol/NURBS-Python>`_ for details on the algorithms.
Some other advantages of this package are;
* Python 2.x and 3.x compatibility
* No external dependencies (such as NumPy)
* Uses Python properties for the data storage access
* A :code:`utilities` module containing several helper functions
* :code:`Grid` and :code:`GridWeighted` classes for generating various types of control points grids
The NURBS-Python package follows an object-oriented design as much as possible. However, in order to understand the algorithms, you might need to take a look at `The NURBS Book <http://www.springer.com/gp/book/9783642973857>`_ itself.
.. moduleauthor:: Onur Rauf Bingol
"""
__version__ = "3.0.0"
|
Disable import * as it seems to cause some unnecessary trouble
|
Disable import * as it seems to cause some unnecessary trouble
|
Python
|
mit
|
orbingol/NURBS-Python,orbingol/NURBS-Python
|
---
+++
@@ -16,15 +16,3 @@
"""
__version__ = "3.0.0"
-
-# Fixes "from geomdl import *" but this is not considered as a good practice
-# @see: https://docs.python.org/3/tutorial/modules.html#importing-from-a-package
-__all__ = ["BSpline.Curve",
- "BSpline.Curve2D",
- "BSpline.Surface",
- "NURBS.Curve",
- "NURBS.Curve2D",
- "NURBS.Surface",
- "CPGen.Grid",
- "CPGen.GridWeighted",
- "utilities"]
|
e71f23f0bef4307831b240ee162bd9e2cf84e212
|
hoomd/pytest/dummy.py
|
hoomd/pytest/dummy.py
|
from hoomd.triggers import Trigger
from hoomd.meta import _Operation, _TriggeredOperation
class DummySimulation:
def __init__(self):
self.state = DummyState()
self.operations = DummyOperations()
self._cpp_sys = DummySystem()
class DummySystem:
def __init__(self):
self.dummy_list = []
class DummyState:
def __init__(self):
pass
@property
def particle_types(self):
return ['A', 'B']
class DummyOperations:
pass
class DummyCppObj:
def __init__(self):
self._dict = dict()
def setTypeParam(self, type_, value):
self._dict[type_] = value
def getTypeParam(self, type_):
return self._dict[type_]
@property
def param1(self):
return self._param1
@param1.setter
def param1(self, value):
self._param1 = value
@property
def param2(self):
return self._param2
@param2.setter
def param2(self, value):
self._param2 = value
class DummyOperation(_Operation):
'''Requires that user manually add param_dict and typeparam_dict items.
This is for testing purposes.
'''
pass
class DummyTriggeredOp(_TriggeredOperation):
_cpp_list_name = 'dummy_list'
class DummyTrigger(Trigger):
def __call__(self, ts):
return True
|
from hoomd.triggers import Trigger
from hoomd.meta import _Operation, _TriggeredOperation
class DummySimulation:
def __init__(self):
self.state = DummyState()
self.operations = DummyOperations()
self._cpp_sys = DummySystem()
class DummySystem:
def __init__(self):
self.dummy_list = []
class DummyState:
def __init__(self):
pass
@property
def particle_types(self):
return ['A', 'B']
class DummyOperations:
pass
class DummyCppObj:
def __init__(self):
self._dict = dict()
def setTypeParam(self, type_, value):
self._dict[type_] = value
def getTypeParam(self, type_):
return self._dict[type_]
@property
def param1(self):
return self._param1
@param1.setter
def param1(self, value):
self._param1 = value
@property
def param2(self):
return self._param2
@param2.setter
def param2(self, value):
self._param2 = value
class DummyOperation(_Operation):
'''Requires that user manually add param_dict and typeparam_dict items.
This is for testing purposes.
'''
def attach(self, simulation):
self._cpp_obj = "cpp obj"
class DummyTriggeredOp(_TriggeredOperation):
_cpp_list_name = 'dummy_list'
class DummyTrigger(Trigger):
def __call__(self, ts):
return True
|
Include attach method in DummyOperation
|
Include attach method in DummyOperation
|
Python
|
bsd-3-clause
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
---
+++
@@ -59,7 +59,8 @@
This is for testing purposes.
'''
- pass
+ def attach(self, simulation):
+ self._cpp_obj = "cpp obj"
class DummyTriggeredOp(_TriggeredOperation):
|
c5ae3a8708023039b877a47bcfedec6997ec1583
|
geokey_wegovnow/conversions.py
|
geokey_wegovnow/conversions.py
|
"""Methods for converting data between systems or formats."""
def make_cm_url(url):
"""Turns a Geokey url into a Community Maps url."""
protocol, address = url.split('//')
address_parts = address.split('/')
new_address_parts = []
for i, part in enumerate(address_parts):
if part == 'api':
continue
if i == 0 and '-gk-' in part:
new_address_parts.append(part.replace('-gk-', '-cm-'))
elif part.endswith('s'):
new_address_parts.append(part[:-1])
else:
new_address_parts.append(part)
return protocol + '//' + '/'.join(new_address_parts)
def get_link_title(properties):
"""Gets a link title from a properties dictionary."""
if not properties:
return "Unknown title"
# Try plausible fields for link titles.
possible_title_field_names = ['name', 'Name', 'title', 'Title']
for title in possible_title_field_names:
if title in properties:
return properties[title]
# Fall back to the first items in the dict.
return ' '.join(properties.items()[0])
|
"""Methods for converting data between systems or formats."""
def make_cm_url(url):
    """Turns a Geokey url into a Community Maps url."""
    protocol, address = url.split('//')
    kept = []
    for index, segment in enumerate(address.split('/')):
        if segment == 'api':
            # Community Maps urls carry no 'api' path component.
            continue
        if index == 0 and '-gk-' in segment:
            # Swap the Geokey host marker for the Community Maps one.
            segment = segment.replace('-gk-', '-cm-')
        elif segment.endswith('s'):
            # Singularise plural path segments (e.g. 'projects' -> 'project').
            segment = segment[:-1]
        kept.append(segment)
    return '{0}//{1}'.format(protocol, '/'.join(kept))
def get_link_title(properties):
    """Return a human-readable title for a link from its properties dict.

    Searches the keys case-insensitively for plausible title substrings
    ('name', 'title', 'heading', 'main'); falls back to the first
    key/value pair joined as a string, or a placeholder when empty.
    """
    if not properties:
        return "Unknown title"
    # Try plausible fields for link titles (substring match, any case).
    possible_title_field_names = ['name', 'title', 'heading', 'main']
    for title in possible_title_field_names:
        for k in properties.keys():
            if title.upper() in str(k).upper():
                return properties[k]
    # Fall back to the first key/value pair in the dict.
    # next(iter(...)) works on both Python 2 and 3; the previous
    # properties.items()[0] fails on Python 3 (views are not subscriptable).
    return ' '.join(str(a) for a in next(iter(properties.items())))
|
Fix get_link_title for non-string keys and values. Include heading and main as possible titles.
|
Fix get_link_title for non-string keys and values.
Include heading and main as possible titles.
|
Python
|
mit
|
ExCiteS/geokey-wegovnow,ExCiteS/geokey-wegovnow
|
---
+++
@@ -24,10 +24,11 @@
return "Unknown title"
# Try plausible fields for link titles.
- possible_title_field_names = ['name', 'Name', 'title', 'Title']
+ possible_title_field_names = ['name', 'title', 'heading', 'main']
for title in possible_title_field_names:
- if title in properties:
- return properties[title]
+ for k in properties.keys():
+ if str.upper(title) in str.upper(str(k)):
+ return properties[k]
# Fall back to the first items in the dict.
- return ' '.join(properties.items()[0])
+ return ' '.join([str(a) for a in properties.items()[0]])
|
892b1f04ba6f6dde0953c061409fb4eb05935634
|
bot/action/internationalization.py
|
bot/action/internationalization.py
|
import gettext
from bot.action.core.action import IntermediateAction
LOCALE_DIR = "locales"
TRANSLATION_DOMAIN = "telegram-bot"
DEFAULT_LANGUAGE = "en"
class InternationalizationAction(IntermediateAction):
def __init__(self):
super().__init__()
self.cached_translations = {}
self.default_translation = self.__get_translation(DEFAULT_LANGUAGE)
def process(self, event):
lang = event.state.get_for("settings").get("language", DEFAULT_LANGUAGE)
translation = self.__get_translation(lang)
translation.install()
event._ = translation.gettext
self._continue(event)
self.default_translation.install()
def __get_translation(self, language):
if language in self.cached_translations:
return self.cached_translations[language]
translation = gettext.translation(TRANSLATION_DOMAIN, LOCALE_DIR, languages=[language], fallback=True)
self.cached_translations[language] = translation
return translation
|
import gettext
from bot.action.core.action import IntermediateAction
LOCALE_DIR = "locales"
TRANSLATION_DOMAIN = "telegram-bot"
DEFAULT_LANGUAGE = "en"
CACHED_TRANSLATIONS = {}
class InternationalizationAction(IntermediateAction):
    """Intermediate action that installs the per-chat gettext translation
    while an event is processed, restoring the default afterwards."""
    def __init__(self):
        super().__init__()
        # Pre-load the default-language translation so it can be
        # re-installed after every processed event.
        self.default_translation = self.__get_translation(DEFAULT_LANGUAGE)
    def process(self, event):
        # Language is read from the chat's settings state, falling back
        # to the default language when unset.
        lang = event.state.get_for("settings").get_value("language", DEFAULT_LANGUAGE)
        translation = self.__get_translation(lang)
        translation.install()
        event._ = translation.gettext
        self._continue(event)
        # Restore the default translation once the event has been handled.
        self.default_translation.install()
    @staticmethod
    def __get_translation(language):
        # Translations are cached in the module-level CACHED_TRANSLATIONS
        # dict to avoid re-reading catalog files; fallback=True yields a
        # NullTranslations object when no catalog exists for the language.
        if language in CACHED_TRANSLATIONS:
            translation = CACHED_TRANSLATIONS[language]
        else:
            translation = gettext.translation(TRANSLATION_DOMAIN, LOCALE_DIR, languages=[language], fallback=True)
            CACHED_TRANSLATIONS[language] = translation
        return translation
|
Make CACHED_TRANSLATIONS global, fix get_value typo
|
Make CACHED_TRANSLATIONS global, fix get_value typo
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
---
+++
@@ -7,24 +7,27 @@
DEFAULT_LANGUAGE = "en"
+CACHED_TRANSLATIONS = {}
+
class InternationalizationAction(IntermediateAction):
def __init__(self):
super().__init__()
- self.cached_translations = {}
self.default_translation = self.__get_translation(DEFAULT_LANGUAGE)
def process(self, event):
- lang = event.state.get_for("settings").get("language", DEFAULT_LANGUAGE)
+ lang = event.state.get_for("settings").get_value("language", DEFAULT_LANGUAGE)
translation = self.__get_translation(lang)
translation.install()
event._ = translation.gettext
self._continue(event)
self.default_translation.install()
- def __get_translation(self, language):
- if language in self.cached_translations:
- return self.cached_translations[language]
- translation = gettext.translation(TRANSLATION_DOMAIN, LOCALE_DIR, languages=[language], fallback=True)
- self.cached_translations[language] = translation
+ @staticmethod
+ def __get_translation(language):
+ if language in CACHED_TRANSLATIONS:
+ translation = CACHED_TRANSLATIONS[language]
+ else:
+ translation = gettext.translation(TRANSLATION_DOMAIN, LOCALE_DIR, languages=[language], fallback=True)
+ CACHED_TRANSLATIONS[language] = translation
return translation
|
62549a211ff41e2b84a9b085e784649efc76c5d9
|
apps/domain/tests/conftest.py
|
apps/domain/tests/conftest.py
|
import pytest
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.append(myPath + "/../src/")
from app import create_app
@pytest.fixture(scope="function", autouse=True)
def app():
return create_app()
@pytest.fixture
def client(app):
return app.test_client()
|
import pytest
import sys, os
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.append(myPath + "/../src/")
from app import create_app
@pytest.fixture(scope="function", autouse=True)
def app():
    """Create a fresh application per test, bound to the test database."""
    return create_app(
        test_config={"SQLALCHEMY_DATABASE_URI": "sqlite:///databasenetwork.db"}
    )
@pytest.fixture
def client(app):
    """Test client for the application created by the ``app`` fixture."""
    return app.test_client()
|
Update unit test app() fixture
|
Update unit test app() fixture
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
---
+++
@@ -9,7 +9,8 @@
@pytest.fixture(scope="function", autouse=True)
def app():
- return create_app()
+ db_path = "sqlite:///databasenetwork.db"
+ return create_app(test_config={"SQLALCHEMY_DATABASE_URI": db_path})
@pytest.fixture
|
7f53f8da79a41591498b73356770ff1cf417adf4
|
byceps/services/country/service.py
|
byceps/services/country/service.py
|
"""
byceps.services.country.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from __future__ import annotations
import codecs
from dataclasses import dataclass
import json
from flask import current_app
@dataclass(frozen=True)
class Country:
name: str
alpha2: str
alpha3: str
def get_countries() -> list[Country]:
"""Load countries from JSON file."""
reader = codecs.getreader('utf-8')
path = 'services/country/resources/countries.json'
with current_app.open_resource(path) as f:
records = json.load(reader(f))
return [Country(**record) for record in records]
def get_country_names() -> list[str]:
"""Return country names."""
return [country.name for country in get_countries()]
|
"""
byceps.services.country.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from __future__ import annotations
from dataclasses import dataclass
import json
from flask import current_app
@dataclass(frozen=True)
class Country:
    # Immutable country record loaded from the bundled JSON resource.
    name: str
    alpha2: str  # two-letter country code (presumably ISO 3166 alpha-2)
    alpha3: str  # three-letter country code (presumably ISO 3166 alpha-3)
def get_countries() -> list[Country]:
    """Load the country list from the bundled JSON resource file."""
    path = 'services/country/resources/countries.json'
    with current_app.open_resource(path) as resource:
        raw_records = json.load(resource)
    return [Country(**fields) for fields in raw_records]
def get_country_names() -> list[str]:
    """Return the names of all known countries."""
    return [c.name for c in get_countries()]
|
Remove unnecessary codec reader, fixes type issue
|
Remove unnecessary codec reader, fixes type issue
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
---
+++
@@ -7,7 +7,6 @@
"""
from __future__ import annotations
-import codecs
from dataclasses import dataclass
import json
@@ -23,11 +22,9 @@
def get_countries() -> list[Country]:
"""Load countries from JSON file."""
- reader = codecs.getreader('utf-8')
-
path = 'services/country/resources/countries.json'
with current_app.open_resource(path) as f:
- records = json.load(reader(f))
+ records = json.load(f)
return [Country(**record) for record in records]
|
6b49db80a4508c3cba7f410db8ed3f23b5234e3b
|
src/behavior/features/terrain.py
|
src/behavior/features/terrain.py
|
from lettuce import *
import requests
import json
tenantList = [ "511", "615", "634", "515" ]
def initialize():
with open("properties.json") as config_file:
world.config = json.load(config_file)
@before.each_scenario
def cleanContext(feature):
for tenant in tenantList:
url = world.config['targetUrl'] + '/pap/v1/' + tenant
r = requests.delete(url)
initialize()
|
from lettuce import *
import requests
import json
import os
tenantList = [ "511", "615", "634", "515" ]
def initialize():
    """Load the test configuration into lettuce's ``world``.

    The config file defaults to ``properties.json`` and may be
    overridden via the LETTUCE_CONFIG environment variable.
    """
    filename = os.getenv("LETTUCE_CONFIG") or "properties.json"
    with open(filename) as config_file:
        world.config = json.load(config_file)
@before.each_scenario
def cleanContext(feature):
    """Delete every known test tenant's data before each scenario runs.

    Removes the unused ``r`` local; the HTTP response is intentionally
    ignored — cleanup is best-effort.
    """
    for tenant in tenantList:
        url = world.config['targetUrl'] + '/pap/v1/' + tenant
        requests.delete(url)
initialize()
|
ADD Name of the configuration file in an environment variable
|
ADD Name of the configuration file in an environment variable
|
Python
|
apache-2.0
|
telefonicaid/fiware-keypass,telefonicaid/fiware-keypass,telefonicaid/fiware-keypass
|
---
+++
@@ -1,12 +1,18 @@
from lettuce import *
import requests
import json
+import os
tenantList = [ "511", "615", "634", "515" ]
def initialize():
- with open("properties.json") as config_file:
+ if os.getenv("LETTUCE_CONFIG"):
+ filename = os.getenv("LETTUCE_CONFIG")
+ else:
+ filename = "properties.json"
+
+ with open(filename) as config_file:
world.config = json.load(config_file)
@before.each_scenario
|
b9e2ee470308231f4ea4d23297f7b07fab711dba
|
django_lightweight_queue/task.py
|
django_lightweight_queue/task.py
|
from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
def __init__(self, queue='default'):
self.queue = queue
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
return TaskWrapper(fn, self.queue)
class TaskWrapper(object):
def __init__(self, fn, queue):
self.fn = fn
self.queue = queue
self.path = '%s.%s' % (fn.__module__, fn.__name__)
def __repr__(self):
return "<TaskWrapper: %s>" % self.path
def __call__(self, *args, **kwargs):
job = Job(self.path, args, kwargs)
job.validate()
get_backend().enqueue(job, self.queue)
|
from .job import Job
from .utils import get_backend
from . import app_settings
class task(object):
    """Decorator that registers a function as a queued task.

    Usage: ``@task(queue='emails', timeout=30)``. The decorated function
    is replaced by a TaskWrapper that enqueues a job when called.
    """
    def __init__(self, queue='default', timeout=None):
        self.queue = queue
        self.timeout = timeout
        # Ensure at least one worker is configured for this queue.
        app_settings.WORKERS.setdefault(self.queue, 1)
    def __call__(self, fn):
        return TaskWrapper(fn, self.queue, self.timeout)
class TaskWrapper(object):
    """Callable wrapper that enqueues ``fn`` as a job instead of running it."""

    def __init__(self, fn, queue, timeout):
        self.fn = fn
        self.queue = queue
        self.timeout = timeout
        # Dotted import path used by workers to locate the task function.
        self.path = '{0}.{1}'.format(fn.__module__, fn.__name__)

    def __repr__(self):
        return '<TaskWrapper: {0}>'.format(self.path)

    def __call__(self, *args, **kwargs):
        # Calling the wrapper enqueues a job rather than executing fn.
        pending = Job(self.path, args, kwargs)
        pending.validate()
        get_backend().enqueue(pending, self.queue)
|
Store a timeout value on the TaskWrapper, defaulting to no timeout.
|
Store a timeout value on the TaskWrapper, defaulting to no timeout.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
Python
|
bsd-3-clause
|
lamby/django-lightweight-queue,prophile/django-lightweight-queue,thread/django-lightweight-queue,thread/django-lightweight-queue,prophile/django-lightweight-queue
|
---
+++
@@ -4,18 +4,20 @@
from . import app_settings
class task(object):
- def __init__(self, queue='default'):
+ def __init__(self, queue='default', timeout=None):
self.queue = queue
+ self.timeout = timeout
app_settings.WORKERS.setdefault(self.queue, 1)
def __call__(self, fn):
- return TaskWrapper(fn, self.queue)
+ return TaskWrapper(fn, self.queue, self.timeout)
class TaskWrapper(object):
- def __init__(self, fn, queue):
+ def __init__(self, fn, queue, timeout):
self.fn = fn
self.queue = queue
+ self.timeout = timeout
self.path = '%s.%s' % (fn.__module__, fn.__name__)
|
6bbcff384acf4ca5cb22aa80dc4032e3832817b2
|
wsgi/flaskapp.py
|
wsgi/flaskapp.py
|
import json
import os
import ieeebot
from flask import Flask, request, abort
from storage import Storage
app = Flask(__name__)
app.config['PROPAGATE_EXCEPTIONS'] = True
ieeebot.logger.debug(ieeebot.TOKEN)
ieeebot.logger.debug(ieeebot.DATABASE_FILE)
@app.route('/ieeetestbot', methods=['POST'])
def hello():
update = request.get_json(force=True)
#ieeebot.logger.info(str(update))
if update['update_id'] > ieeebot.last_update_id:
ieeebot.last_update_id = update['update_id']
ieeebot.process_update(update)
return "", 200
if __name__ == "__main__":
app.run(debug = True)
|
import json
import os
import ieeebot
from flask import Flask, request, abort
from storage import Storage
app = Flask(__name__)
app.config['PROPAGATE_EXCEPTIONS'] = True
ieeebot.logger.debug(ieeebot.TOKEN)
ieeebot.logger.debug(ieeebot.DATABASE_FILE)
@app.route('/webhook/<token>', methods=['POST'])
def hello(token=None):
    """Handle a webhook update, but only when the URL token matches.

    Requests with a wrong token get a 400 so the endpoint cannot be
    driven by anyone who does not know the bot token.
    """
    if token != ieeebot.TOKEN:
        return "", 400
    update = request.get_json(force=True)
    ieeebot.logger.info(str(update))
    # Process each update at most once, tracked via update_id.
    if update['update_id'] > ieeebot.last_update_id:
        ieeebot.last_update_id = update['update_id']
        ieeebot.process_update(update)
    return "", 200
if __name__ == "__main__":
app.run(debug = True)
|
Use a dynamic url to secure webhook
|
Use a dynamic url to secure webhook
|
Python
|
agpl-3.0
|
ieeeugrsb/IEEEbot
|
---
+++
@@ -12,16 +12,19 @@
ieeebot.logger.debug(ieeebot.TOKEN)
ieeebot.logger.debug(ieeebot.DATABASE_FILE)
-@app.route('/ieeetestbot', methods=['POST'])
-def hello():
- update = request.get_json(force=True)
- #ieeebot.logger.info(str(update))
-
- if update['update_id'] > ieeebot.last_update_id:
- ieeebot.last_update_id = update['update_id']
- ieeebot.process_update(update)
+@app.route('/webhook/<token>', methods=['POST'])
+def hello(token=None):
+ if token == ieeebot.TOKEN:
+ update = request.get_json(force=True)
+ ieeebot.logger.info(str(update))
+
+ if update['update_id'] > ieeebot.last_update_id:
+ ieeebot.last_update_id = update['update_id']
+ ieeebot.process_update(update)
- return "", 200
+ return "", 200
+ else:
+ return "", 400
if __name__ == "__main__":
app.run(debug = True)
|
cd3b92d75c331de13a25693822cac57dc82d8e81
|
mla_game/apps/transcript/management/commands/fake_game_one_gameplay.py
|
mla_game/apps/transcript/management/commands/fake_game_one_gameplay.py
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from ...models import (
Transcript, TranscriptPhraseVote
)
from ...tasks import update_transcript_stats
class Command(BaseCommand):
help = 'Creates random votes for 5 phrases in a random transcript'
def handle(self, *args, **options):
users = User.objects.all()
transcript = Transcript.objects.random_transcript(in_progress=False).first()
phrases = transcript.phrases.all()[:5]
for phrase in phrases:
for user in users:
TranscriptPhraseVote.objects.create(
transcript_phrase=phrase,
user=user,
upvote=random.choice([True, False])
)
update_transcript_stats(transcript)
|
import random
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from ...models import (
Transcript, TranscriptPhraseVote
)
from ...tasks import update_transcript_stats
class Command(BaseCommand):
    # Management command that seeds fake game-play voting data.
    help = 'Creates random votes for 5 phrases in a random transcript'

    def handle(self, *args, **options):
        """Cast one random up/down vote per (user, phrase) pair.

        Uses the first 5 users and the first 5 phrases of a random
        transcript that is not in progress, then refreshes the
        transcript's stats.
        """
        users = User.objects.all()[:5]
        transcript = Transcript.objects.random_transcript(
            in_progress=False
        ).first()
        phrases = transcript.phrases.all()[:5]
        for phrase in phrases:
            for user in users:
                TranscriptPhraseVote.objects.create(
                    transcript_phrase=phrase,
                    user=user,
                    upvote=random.choice([True, False])
                )
        update_transcript_stats(transcript)
|
Use a smaller set of users in fake game two gameplay
|
Use a smaller set of users in fake game two gameplay
|
Python
|
mit
|
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
|
---
+++
@@ -13,8 +13,10 @@
help = 'Creates random votes for 5 phrases in a random transcript'
def handle(self, *args, **options):
- users = User.objects.all()
- transcript = Transcript.objects.random_transcript(in_progress=False).first()
+ users = User.objects.all()[:5]
+ transcript = Transcript.objects.random_transcript(
+ in_progress=False
+ ).first()
phrases = transcript.phrases.all()[:5]
for phrase in phrases:
for user in users:
|
b850d8ef9ef2453445c14f4c78a058fe51211983
|
trac/upgrades/db11.py
|
trac/upgrades/db11.py
|
import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
from trac.siteconfig import __default_macro_dir__ as macro_dir
for f in os.listdir(macro_dir):
if not f.endswith('.py'):
continue
src = os.path.join(macro_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
|
import os.path
import shutil
sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';
-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
name text PRIMARY KEY,
time integer,
description text
);
INSERT INTO version(name,time,description)
SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';
-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
name text PRIMARY KEY,
owner text,
description text
);
INSERT INTO component(name,owner,description)
SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""
def do_upgrade(env, ver, cursor):
    """Run the schema migration and install new default wiki macros.

    Executes the SQL defined above, then copies any default macro
    ``.py`` files not yet present into the environment's wiki-macros
    directory.
    """
    cursor.execute(sql)

    # Copy the new default wiki macros over to the environment
    from trac.siteconfig import __default_macros_dir__ as macros_dir
    for f in os.listdir(macros_dir):
        if not f.endswith('.py'):
            continue
        src = os.path.join(macros_dir, f)
        dst = os.path.join(env.path, 'wiki-macros', f)
        # Never overwrite a macro file the environment already has.
        if not os.path.isfile(dst):
            shutil.copy2(src, dst)
|
Fix typo in upgrade script
|
Fix typo in upgrade script
|
Python
|
bsd-3-clause
|
pkdevbox/trac,pkdevbox/trac,pkdevbox/trac,pkdevbox/trac
|
---
+++
@@ -32,11 +32,11 @@
cursor.execute(sql)
# Copy the new default wiki macros over to the environment
- from trac.siteconfig import __default_macro_dir__ as macro_dir
- for f in os.listdir(macro_dir):
+ from trac.siteconfig import __default_macros_dir__ as macros_dir
+ for f in os.listdir(macros_dir):
if not f.endswith('.py'):
continue
- src = os.path.join(macro_dir, f)
+ src = os.path.join(macros_dir, f)
dst = os.path.join(env.path, 'wiki-macros', f)
if not os.path.isfile(dst):
shutil.copy2(src, dst)
|
1b0fbe54406f22017bd5f40cee52333f31807272
|
hardware/sense_hat/marble_maze.py
|
hardware/sense_hat/marble_maze.py
|
# based on https://www.raspberrypi.org/learning/sense-hat-marble-maze/worksheet/
from sense_hat import SenseHat
import time
sense = SenseHat()
sense.clear()
time.sleep(0.5)
r = (255, 0, 0 )
b = (0,0,0)
maze = [[r,r,r,r,r,r,r,r],
[r,b,b,b,b,b,b,r],
[r,r,r,b,r,b,b,r],
[r,b,r,b,r,r,r,r],
[r,b,b,b,b,b,b,r],
[r,b,r,r,r,r,b,r],
[r,b,b,r,b,b,b,r],
[r,r,r,r,r,r,r,r]]
sense.set_pixels(sum(maze,[]))
|
# based on https://www.raspberrypi.org/learning/sense-hat-marble-maze/worksheet/
from sense_hat import SenseHat
import time

sense = SenseHat()
sense.clear()
time.sleep(0.5)

# Display colours: r = wall (red), b = background (off), w = marble (white).
r = (255, 0, 0 )
b = (0,0,0)
w = (255, 255, 255 )

# Starting grid position of the marble.
x = 1
y = 1

# 8x8 maze layout; the outer ring is all wall.
maze = [[r,r,r,r,r,r,r,r],
        [r,b,b,b,b,b,b,r],
        [r,r,r,b,r,b,b,r],
        [r,b,r,b,r,r,r,r],
        [r,b,b,b,b,b,b,r],
        [r,b,r,r,r,r,b,r],
        [r,b,b,r,b,b,b,r],
        [r,r,r,r,r,r,r,r]]
# sum(maze, []) flattens the rows into the 64-pixel list set_pixels expects.
sense.set_pixels(sum(maze,[]))

game_over = False

# NOTE(review): game_over is never set to True, so this loop runs forever,
# redrawing a static marble at (x, y) — presumably a stepping stone toward
# a movable marble. Confirm intended.
while not game_over:
    maze[y][x] = w
    sense.set_pixels(sum(maze,[]))
|
Add static white marble to sense hat maze
|
Add static white marble to sense hat maze
|
Python
|
mit
|
claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code
|
---
+++
@@ -10,6 +10,10 @@
r = (255, 0, 0 )
b = (0,0,0)
+w = (255, 255, 255 )
+
+x = 1
+y = 1
maze = [[r,r,r,r,r,r,r,r],
[r,b,b,b,b,b,b,r],
@@ -21,3 +25,9 @@
[r,r,r,r,r,r,r,r]]
sense.set_pixels(sum(maze,[]))
+
+game_over = False
+
+while not game_over:
+ maze[y][x] = w
+ sense.set_pixels(sum(maze,[]))
|
3d98b426b3eb9b1ddc42e4726f7b0c99c2e488e3
|
scripts/generate_setup_builtin_functions.py
|
scripts/generate_setup_builtin_functions.py
|
# Copyright 2008 Paul Hodge
import os, string
def run(functionsDirectory, outputFilename):
print "dir is: " +functionsDirectory
files = os.listdir(functionsDirectory)
functionNames = []
for file in files:
if file.endswith('.cpp'):
function_name = os.path.split(file)[1][:-4]
functionNames.append(function_name)
#print "added "+function_name
def makeNamespace(functionName):
return functionName.replace('-','_')+"_function"
namespaces = map(makeNamespace, functionNames)
def makePredeclaration(namespace):
return "namespace "+namespace+" { void setup(Branch& kernel); }"
def makeCall(namespace):
return namespace+"::setup(kernel);"
predeclarations = map(makePredeclaration, namespaces)
calls = map(makeCall, namespaces)
output = string.Template(TEMPLATE).substitute({
'predeclarations':"\n".join(predeclarations),
'calls':"\n ".join(calls)})
output_file = open(outputFilename, 'w')
output_file.write(output)
output_file.close()
TEMPLATE = """
// Copyright 2008 Paul Hodge
#include "common_headers.h"
#include "branch.h"
namespace circa {
$predeclarations
void setup_builtin_functions(Branch& kernel)
{
$calls
}
} // namespace circa
"""
if __name__ == "__main__":
run("../src/builtin_functions", "../src/setup_builtin_functions.cpp")
|
# Copyright 2008 Paul Hodge
import os, string
# NOTE: legacy Python 2 script (print statement; map() returning a list).
def run(functionsDirectory, outputFilename):
    """Generate setup_builtin_functions.cpp from the .cpp files found in
    functionsDirectory, writing the result to outputFilename."""
    print "dir is: " +functionsDirectory
    files = os.listdir(functionsDirectory)
    functionNames = []
    # Every .cpp file in the directory defines one builtin function;
    # the function name is the filename without its extension.
    for file in files:
        if file.endswith('.cpp'):
            function_name = os.path.split(file)[1][:-4]
            functionNames.append(function_name)
            #print "added "+function_name
    def makeNamespace(functionName):
        # C++ namespaces cannot contain '-'.
        return functionName.replace('-','_')+"_function"
    namespaces = map(makeNamespace, functionNames)
    def makePredeclaration(namespace):
        return "namespace "+namespace+" { void setup(Branch& kernel); }"
    def makeCall(namespace):
        return namespace+"::setup(kernel);"
    predeclarations = map(makePredeclaration, namespaces)
    calls = map(makeCall, namespaces)

    # Sort so the generated file is deterministic regardless of
    # directory listing order.
    predeclarations.sort()
    calls.sort()

    output = string.Template(TEMPLATE).substitute({
        'predeclarations':"\n".join(predeclarations),
        'calls':"\n    ".join(calls)})

    output_file = open(outputFilename, 'w')
    output_file.write(output)
    output_file.close()
TEMPLATE = """
// Copyright 2008 Paul Hodge
#include "common_headers.h"
#include "branch.h"
namespace circa {
$predeclarations
void setup_builtin_functions(Branch& kernel)
{
$calls
}
} // namespace circa
"""
if __name__ == "__main__":
run("../src/builtin_functions", "../src/setup_builtin_functions.cpp")
|
Enforce consistent results for generated code
|
Enforce consistent results for generated code
|
Python
|
mit
|
andyfischer/circa,andyfischer/circa,andyfischer/circa,andyfischer/circa
|
---
+++
@@ -26,6 +26,9 @@
predeclarations = map(makePredeclaration, namespaces)
calls = map(makeCall, namespaces)
+
+ predeclarations.sort()
+ calls.sort()
output = string.Template(TEMPLATE).substitute({
'predeclarations':"\n".join(predeclarations),
|
b89b6a3d609c29ed544e2ab6b0995932b475ef96
|
admin/common_auth/forms.py
|
admin/common_auth/forms.py
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.all(),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
    """Email/password login form for the admin app."""
    email = forms.CharField(label=u'Email', required=True)
    password = forms.CharField(
        label=u'Password',
        widget=forms.PasswordInput(render_value=False),
        required=True
    )
class UserRegistrationForm(forms.Form):
    """ A form that finds an existing OSF User, and grants permissions to that
    user so that they can use the admin app"""

    # The OSF id is expected to be exactly five characters long.
    osf_id = forms.CharField(required=True, max_length=5, min_length=5)
    # TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
    group_perms = forms.ModelMultipleChoiceField(
        queryset=Group.objects.exclude(name__startswith='collections_'),
        required=False,
        widget=forms.CheckboxSelectMultiple
    )
class DeskUserForm(forms.ModelForm):
    """Form editing the desk token fields on an AdminProfile."""
    class Meta:
        model = AdminProfile
        fields = ['desk_token', 'desk_token_secret']
|
Exclude collection groups from ADM UI
|
Exclude collection groups from ADM UI
|
Python
|
apache-2.0
|
Johnetordoff/osf.io,adlius/osf.io,adlius/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,erinspace/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,mfraezz/osf.io,baylee-d/osf.io,cslzchen/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,mattclark/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,binoculars/osf.io,aaxelb/osf.io,felliott/osf.io,icereval/osf.io,mfraezz/osf.io,felliott/osf.io,felliott/osf.io,sloria/osf.io,mfraezz/osf.io,erinspace/osf.io,aaxelb/osf.io,pattisdr/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,caseyrollins/osf.io,saradbowman/osf.io,binoculars/osf.io,binoculars/osf.io,baylee-d/osf.io,mattclark/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,aaxelb/osf.io,icereval/osf.io,baylee-d/osf.io,adlius/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,icereval/osf.io,sloria/osf.io,sloria/osf.io,cslzchen/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,adlius/osf.io
|
---
+++
@@ -21,8 +21,9 @@
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
+ # TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
- queryset=Group.objects.all(),
+ queryset=Group.objects.exclude(name__startswith='collections_'),
required=False,
widget=forms.CheckboxSelectMultiple
)
|
505e6ee2643dc9179565db5aad9bdddaea849b86
|
shipyard/rules/third-party/haproxy/build.py
|
shipyard/rules/third-party/haproxy/build.py
|
from foreman import define_parameter
from templates import pods
(define_parameter('image-version')
.with_doc('HAProxy image version.')
.with_default('1.8.9'))
@pods.app_specifier
def haproxy_app(_):
return pods.App(
name='haproxy',
exec=[
'/usr/local/sbin/haproxy',
'-f', '/etc/haproxy/haproxy.cfg',
],
volumes=[
pods.Volume(
name='etc-hosts-volume',
path='/etc/hosts',
host_path='/etc/hosts',
),
pods.Volume(
name='haproxy-volume',
path='/etc/haproxy',
data='haproxy-volume/haproxy-config.tar.gz',
),
],
ports=[
pods.Port(
name='web',
protocol='tcp',
port=8443,
host_port=443,
),
],
)
@pods.image_specifier
def haproxy_image(parameters):
return pods.Image(
image_build_uri='docker://haproxy:%s' % parameters['image-version'],
name='haproxy',
app=parameters['haproxy_app'],
)
haproxy_image.specify_image.depend('haproxy_app/specify_app')
haproxy_image.build_image.depend('//host/docker2aci:install')
|
from foreman import define_parameter
from templates import pods
(define_parameter('image-version')
.with_doc('HAProxy image version.')
.with_default('2.0.3'))
@pods.app_specifier
def haproxy_app(_):
return pods.App(
name='haproxy',
exec=[
'/usr/local/sbin/haproxy',
'-f', '/etc/haproxy/haproxy.cfg',
],
volumes=[
pods.Volume(
name='etc-hosts-volume',
path='/etc/hosts',
host_path='/etc/hosts',
),
pods.Volume(
name='haproxy-volume',
path='/etc/haproxy',
data='haproxy-volume/haproxy-config.tar.gz',
),
],
ports=[
pods.Port(
name='web',
protocol='tcp',
port=8443,
host_port=443,
),
],
)
@pods.image_specifier
def haproxy_image(parameters):
return pods.Image(
image_build_uri='docker://haproxy:%s' % parameters['image-version'],
name='haproxy',
app=parameters['haproxy_app'],
)
haproxy_image.specify_image.depend('haproxy_app/specify_app')
haproxy_image.build_image.depend('//host/docker2aci:install')
|
Upgrade HAProxy to version 2.0.3
|
Upgrade HAProxy to version 2.0.3
|
Python
|
mit
|
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
|
---
+++
@@ -5,7 +5,7 @@
(define_parameter('image-version')
.with_doc('HAProxy image version.')
- .with_default('1.8.9'))
+ .with_default('2.0.3'))
@pods.app_specifier
|
20d355c52a73e38ae421aa3e4227c2c60d6ae2ff
|
ckanext/inventory/logic/schema.py
|
ckanext/inventory/logic/schema.py
|
from ckan.lib.navl.validators import ignore_empty, not_empty
from ckan.logic.validators import (
name_validator, boolean_validator, is_positive_integer, isodate,
group_id_exists)
def default_inventory_entry_schema():
schema = {
'id': [unicode, ignore_empty],
'title': [unicode, not_empty],
'group_id': [group_id_exists],
'is_recurring': [boolean_validator],
'recurring_interval': [is_positive_integer],
'last_added_dataset_timestamp': [isodate],
}
return schema
def default_inventory_entry_schema_create():
schema = {
'title': [unicode, not_empty],
'recurring_interval': [is_positive_integer],
}
return schema
|
from ckan.lib.navl.validators import ignore_empty, not_empty
from ckan.logic.validators import (
name_validator, boolean_validator, natural_number_validator, isodate,
group_id_exists)
def default_inventory_entry_schema():
schema = {
'id': [unicode, ignore_empty],
'title': [unicode, not_empty],
'group_id': [group_id_exists],
'is_recurring': [boolean_validator],
'recurring_interval': [natural_number_validator],
'last_added_dataset_timestamp': [isodate],
}
return schema
def default_inventory_entry_schema_create():
schema = {
'title': [unicode, not_empty],
'recurring_interval': [natural_number_validator],
}
return schema
|
Allow zeros for recurring interval
|
Allow zeros for recurring interval
|
Python
|
apache-2.0
|
govro/ckanext-inventory,govro/ckanext-inventory,govro/ckanext-inventory,govro/ckanext-inventory
|
---
+++
@@ -1,6 +1,6 @@
from ckan.lib.navl.validators import ignore_empty, not_empty
from ckan.logic.validators import (
- name_validator, boolean_validator, is_positive_integer, isodate,
+ name_validator, boolean_validator, natural_number_validator, isodate,
group_id_exists)
@@ -10,7 +10,7 @@
'title': [unicode, not_empty],
'group_id': [group_id_exists],
'is_recurring': [boolean_validator],
- 'recurring_interval': [is_positive_integer],
+ 'recurring_interval': [natural_number_validator],
'last_added_dataset_timestamp': [isodate],
}
return schema
@@ -19,6 +19,6 @@
def default_inventory_entry_schema_create():
schema = {
'title': [unicode, not_empty],
- 'recurring_interval': [is_positive_integer],
+ 'recurring_interval': [natural_number_validator],
}
return schema
|
133a085f40f1536d5ebb26e912d15fa3bddcc82c
|
manager.py
|
manager.py
|
from cement.core.foundation import CementApp
import command
import util.config
util.config.Configuration()
class Manager(CementApp):
class Meta:
label = 'QLDS-Manager'
handlers = [
command.default.ManagerBaseController,
command.setup.SetupController
]
with Manager() as app:
app.run()
|
from cement.core.foundation import CementApp
import command
import util.config
class Manager(CementApp):
class Meta:
label = 'QLDS-Manager'
handlers = command.commands
with Manager() as app:
app.run()
|
Use handlers defined in command package
|
Use handlers defined in command package
|
Python
|
mit
|
rzeka/QLDS-Manager
|
---
+++
@@ -2,16 +2,10 @@
import command
import util.config
-util.config.Configuration()
-
-
class Manager(CementApp):
class Meta:
label = 'QLDS-Manager'
- handlers = [
- command.default.ManagerBaseController,
- command.setup.SetupController
- ]
+ handlers = command.commands
with Manager() as app:
|
a8515cf56837ef3f32ea53003f88275a47c4d249
|
src/pipeline.py
|
src/pipeline.py
|
import os
import fnmatch
import re
import subprocess
import sys
import json
import imp
import time
class pipeline(object):
def __init__(self):
self.name = ''
self.taskId = ''
self.taskPath = ''
self.scriptPath = ''
self.inputPath = ''
self.outputPath = ''
self.setting = ''
def logger(self, message):
print("["+time.strftime('%Y-%m-%d %H:%M%p %Z')+"] "+message)
def read_config(self):
with open("app.json") as json_file:
self.setting = json.load(json_file)
def clean(self):
self.read_config()
self.logger("Start pipeline")
def processApp(self):
self.logger("processApp")
def pj_initialize(self):
self.logger("initialize")
def run(self):
for step in self.setting['step']:
mod = imp.load_source(step["packageName"], './')
if hasattr(mod, step["className"]):
class_inst = getattr(mod, step["className"])()
class_inst.setName(step['name'])
class_inst.init()
class_inst.run()
class_inst.finish()
|
import os
import fnmatch
import re
import subprocess
import sys
import json
import imp
import time
from pprint import pprint
class pipeline(object):
def __init__(self):
self.name = ''
self.taskId = ''
self.taskPath = ''
self.scriptPath = ''
self.inputPath = ''
self.outputPath = ''
self.setting = ''
def logger(self, message):
print("["+time.strftime('%Y-%m-%d %H:%M%p %Z')+"] "+message)
def read_config(self):
with open("app.json") as json_file:
self.setting = json.load(json_file)
def clean(self):
self.read_config()
self.logger("Start pipeline")
def processApp(self):
self.logger("processApp")
def pj_initialize(self):
self.logger("initialize")
def run(self):
for step in self.setting['step']:
mod = __import__(step["packageName"])
if hasattr(mod, step["className"]):
class_inst = getattr(mod, step["className"])()
class_inst.setName(step['name'])
class_inst.init()
class_inst.run()
class_inst.finish()
|
Change the way to import package dynamically
|
Change the way to import package dynamically
|
Python
|
mit
|
s4553711/HiScript
|
---
+++
@@ -7,6 +7,7 @@
import json
import imp
import time
+from pprint import pprint
class pipeline(object):
def __init__(self):
@@ -37,7 +38,7 @@
def run(self):
for step in self.setting['step']:
- mod = imp.load_source(step["packageName"], './')
+ mod = __import__(step["packageName"])
if hasattr(mod, step["className"]):
class_inst = getattr(mod, step["className"])()
class_inst.setName(step['name'])
|
86cd447a9d724489ed3c23157d411fa4a9208b30
|
changes/jobs/cleanup_tasks.py
|
changes/jobs/cleanup_tasks.py
|
from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=5)
@tracked_task
def cleanup_tasks():
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if nescessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
|
from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if nescessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
|
Increase task check time to 1h
|
Increase task check time to 1h
|
Python
|
apache-2.0
|
wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes
|
---
+++
@@ -7,7 +7,7 @@
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
-CHECK_TIME = timedelta(minutes=5)
+CHECK_TIME = timedelta(minutes=60)
@tracked_task
|
1172bb29cca80486fffcfda0dea61a12f643a2e9
|
start_server.py
|
start_server.py
|
#!/usr/bin/env python3
# tsuserver3, an Attorney Online server
#
# Copyright (C) 2016 argoneus <argoneuscze@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from server.tsuserver import TsuServer3
def main():
server = TsuServer3()
server.start()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
# tsuserver3, an Attorney Online server
#
# Copyright (C) 2016 argoneus <argoneuscze@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from server.tsuserver import TsuServer3
# Idiotproof setup
def check_pyyaml():
try:
import yaml
except ImportError:
print("Couldn't import PyYAML. Installing it for you...")
import pip
pip.main(["install", "--user", "pyyaml"])
def main():
server = TsuServer3()
server.start()
if __name__ == '__main__':
check_pyyaml()
main()
|
Install PyYAML automatically before starting server
|
Install PyYAML automatically before starting server
|
Python
|
agpl-3.0
|
Attorney-Online-Engineering-Task-Force/tsuserver3,Mariomagistr/tsuserver3
|
---
+++
@@ -19,6 +19,14 @@
from server.tsuserver import TsuServer3
+# Idiotproof setup
+def check_pyyaml():
+ try:
+ import yaml
+ except ImportError:
+ print("Couldn't import PyYAML. Installing it for you...")
+ import pip
+ pip.main(["install", "--user", "pyyaml"])
def main():
server = TsuServer3()
@@ -26,4 +34,5 @@
if __name__ == '__main__':
+ check_pyyaml()
main()
|
c42393a99435278f577b508e204bdfe1a9a6ff68
|
testproject/tablib_test/tests.py
|
testproject/tablib_test/tests.py
|
from django.test import TestCase
from django_tablib import ModelDataset, Field
from .models import TestModel
class DjangoTablibTestCase(TestCase):
def test_declarative_fields(self):
class TestModelDataset(ModelDataset):
field1 = Field()
field2 = Field(attribute='field1')
class Meta:
model = TestModel
data = TestModelDataset()
self.assertEqual(len(data.headers), 2)
self.assertTrue('id' not in data.headers)
self.assertTrue('field1' in data.headers)
self.assertTrue('field2' in data.headers)
|
from django.test import TestCase
from django_tablib import ModelDataset, Field
from .models import TestModel
class DjangoTablibTestCase(TestCase):
def setUp(self):
TestModel.objects.create(field1='value')
def test_declarative_fields(self):
class TestModelDataset(ModelDataset):
field1 = Field()
field2 = Field(attribute='field1')
class Meta:
model = TestModel
data = TestModelDataset()
self.assertEqual(len(data.headers), 2)
self.assertTrue('id' not in data.headers)
self.assertTrue('field1' in data.headers)
self.assertTrue('field2' in data.headers)
self.assertEqual(data[0][0], data[0][1])
|
Test that declarative fields actually work.
|
Test that declarative fields actually work.
|
Python
|
mit
|
ebrelsford/django-tablib,joshourisman/django-tablib,ebrelsford/django-tablib,joshourisman/django-tablib
|
---
+++
@@ -6,6 +6,9 @@
class DjangoTablibTestCase(TestCase):
+ def setUp(self):
+ TestModel.objects.create(field1='value')
+
def test_declarative_fields(self):
class TestModelDataset(ModelDataset):
field1 = Field()
@@ -20,3 +23,5 @@
self.assertTrue('id' not in data.headers)
self.assertTrue('field1' in data.headers)
self.assertTrue('field2' in data.headers)
+
+ self.assertEqual(data[0][0], data[0][1])
|
3c60fd42d7ce84b0f90d80d6e04b46c8affb5ff5
|
maedchenbund/views.py
|
maedchenbund/views.py
|
from django.shortcuts import render
from .models import Document
def documents(request):
docs = Document.objects.all().order_by("title")
return render(request, 'home.html', {'documents': docs})
|
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from .models import Document
@login_required
def documents(request):
docs = Document.objects.all().order_by("title")
return render(request, 'home.html', {'documents': docs})
|
Add required login to maedchenbund
|
Add required login to maedchenbund
|
Python
|
mit
|
n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb
|
---
+++
@@ -1,8 +1,10 @@
+from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from .models import Document
+@login_required
def documents(request):
docs = Document.objects.all().order_by("title")
return render(request, 'home.html', {'documents': docs})
|
5bbef2ebbe959dd2dcfc53272305fa249acfa17d
|
functional/tests/test_examples.py
|
functional/tests/test_examples.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from functional.common import test
class ExampleTests(test.TestCase):
"""Functional tests for running examples."""
def test_common(self):
# NOTE(stevemar): If an examples has a non-zero return
# code, then execute will raise an error by default.
test.execute('python', test.EXAMPLE_DIR + '/common.py --debug')
@testtools.skip('skipping until bug 1420080 is resolved')
def test_object_api(self):
test.execute('python', test.EXAMPLE_DIR + '/object_api.py --debug')
@testtools.skip('skipping until bug 1420080 is resolved')
def test_osc_lib(self):
test.execute('python', test.EXAMPLE_DIR + '/osc-lib.py --debug')
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from functional.common import test
class ExampleTests(test.TestCase):
"""Functional tests for running examples."""
def test_common(self):
# NOTE(stevemar): If an examples has a non-zero return
# code, then execute will raise an error by default.
test.execute('python', test.EXAMPLE_DIR + '/common.py --debug')
def test_object_api(self):
test.execute('python', test.EXAMPLE_DIR + '/object_api.py --debug')
def test_osc_lib(self):
test.execute('python', test.EXAMPLE_DIR + '/osc-lib.py --debug')
|
Revert "Skip functional tests until they are fixed"
|
Revert "Skip functional tests until they are fixed"
This reverts commit 3946bf0e0186c8c2897f432029bad7873b6d03a0.
Change-Id: Ic21137b208991c9ce738e5bf3ad5e6965b69e166
|
Python
|
apache-2.0
|
dtroyer/python-openstackclient,openstack/python-openstackclient,redhat-openstack/python-openstackclient,redhat-openstack/python-openstackclient,BjoernT/python-openstackclient,BjoernT/python-openstackclient,openstack/python-openstackclient,dtroyer/python-openstackclient
|
---
+++
@@ -10,8 +10,6 @@
# License for the specific language governing permissions and limitations
# under the License.
-import testtools
-
from functional.common import test
@@ -23,10 +21,8 @@
# code, then execute will raise an error by default.
test.execute('python', test.EXAMPLE_DIR + '/common.py --debug')
- @testtools.skip('skipping until bug 1420080 is resolved')
def test_object_api(self):
test.execute('python', test.EXAMPLE_DIR + '/object_api.py --debug')
- @testtools.skip('skipping until bug 1420080 is resolved')
def test_osc_lib(self):
test.execute('python', test.EXAMPLE_DIR + '/osc-lib.py --debug')
|
e68dcc13d6152b15f2b7c5c151e03437d9cda314
|
lib/python/mod_python/__init__.py
|
lib/python/mod_python/__init__.py
|
#
# Copyright 2004 Apache Software Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# Originally developed by Gregory Trubetskoy.
#
# $Id$
__all__ = ["apache", "cgihandler", "psp",
"publisher", "util"]
|
#
# Copyright 2004 Apache Software Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# Originally developed by Gregory Trubetskoy.
#
# $Id$
__all__ = ["apache", "cgihandler", "psp",
"publisher", "util"]
version = "3.2.0-dev-20050518"
|
Fix for MODPYTHON-55 : added a version attribute to the mod_python package.
|
Fix for MODPYTHON-55 : added a version attribute to the mod_python package.
|
Python
|
apache-2.0
|
Distrotech/mod_python,Distrotech/mod_python,Distrotech/mod_python
|
---
+++
@@ -19,3 +19,5 @@
__all__ = ["apache", "cgihandler", "psp",
"publisher", "util"]
+
+version = "3.2.0-dev-20050518"
|
37f6d1a693134785dbfb2cb50b4b6e562be83f1e
|
collection_registry_client.py
|
collection_registry_client.py
|
import json
import sys, os
sys.path.insert(0, os.path.abspath('./python-tastypie-client'))
import tastypie_client
url_root = "http://vorol-dev.cdlib.org/"
path_collection_registry = "collection_registry/api/v1"
url_api = url_root+path_collection_registry
entrypoint_entrypoint_key = "list_entrypoint"
entrypoint_schema_key = "schema"
collection_name = "provenancialcollection"
tp = tastypie_client.Api(url_api)
provenancialcollection = None
for c in tp.collections:
#print c, dir(c), c.url
try:
c.url.index(collection_name)
provenancialcollection = c
except:
pass
print provenancialcollection.url
#print type(provenancialcollection)
#print dir(provenancialcollection)
import time;time.sleep(5)
obj_list = []
for obj in provenancialcollection:#.next():
#print "OBJ?", obj.fields
if obj.fields['url_local']:
print obj.fields['resource_uri'], obj.fields['url_local']
obj_list.append(obj)
print "LENGTH:::", len(obj_list)
print "COLLECTION:"#, dir(provenancialcollection)
print provenancialcollection.meta
print obj.fields
#import code;code.interact(local=locals())
|
import sys, os
sys.path.insert(0, os.path.abspath('./python-tastypie-client'))
import tastypie_client
url_root = "http://vorol-dev.cdlib.org/"
path_collection_registry = "collection_registry/api/v1"
url_api = url_root+path_collection_registry
entrypoint_entrypoint_key = "list_entrypoint"
entrypoint_schema_key = "schema"
collection_name = "provenancialcollection"
tp = tastypie_client.Api(url_api)
provenancialcollection = None
for c in tp.collections:
#print c, dir(c), c.url
try:
c.url.index(collection_name)
provenancialcollection = c
except:
pass
print provenancialcollection.url
#print type(provenancialcollection)
#print dir(provenancialcollection)
import time;time.sleep(5)
obj_list = []
for obj in provenancialcollection:#.next():
#print "OBJ?", obj.fields
if obj.fields['url_local']:
print obj.fields['resource_uri'], obj.fields['url_local']
obj_list.append(obj)
print "LENGTH:::", len(obj_list)
print "COLLECTION:"#, dir(provenancialcollection)
print provenancialcollection.meta
print obj.fields
#import code;code.interact(local=locals())
|
Remove json import, not needed yet
|
Remove json import, not needed yet
|
Python
|
bsd-3-clause
|
mredar/ucldc_collection_registry_client
|
---
+++
@@ -1,5 +1,3 @@
-import json
-
import sys, os
sys.path.insert(0, os.path.abspath('./python-tastypie-client'))
|
230d7745f2f1bbc5099e1288ab482c92262e4126
|
examples/ndbc/buoy_type_request.py
|
examples/ndbc/buoy_type_request.py
|
# Copyright (c) 2018 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
NDBC Buoy Data Request (of any type)
====================================
The NDBC keeps a 40-day recent rolling file for each buoy. This examples shows how to access
the other types of data available for a buoy.
"""
from siphon.simplewebservice.ndbc import NDBC
####################################################
# Request the types of data available from a given buoy.
data_aval = NDBC.buoy_data_types('41002')
print(data_aval)
####################################################
# Get a pandas data frame of all of the observations, meteorological data is the default
# observation set to query.
df = NDBC.realtime_observations('41002', data_type='supl')
df.head()
|
# Copyright (c) 2018 Siphon Contributors.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
NDBC Buoy Data Request (of any type)
====================================
The NDBC keeps a 40-day recent rolling file for each buoy. This examples shows how to access
the other types of data available for a buoy.
"""
from siphon.simplewebservice.ndbc import NDBC
####################################################
# Request the types of data available from a given buoy.
data_aval = NDBC.buoy_data_types('42002')
print(data_aval)
####################################################
# Get a pandas data frame of all of the observations, meteorological data is the default
# observation set to query.
df = NDBC.realtime_observations('42002', data_type='supl')
df.head()
|
Update NDBC example for removed buoy
|
MNT: Update NDBC example for removed buoy
|
Python
|
bsd-3-clause
|
Unidata/siphon
|
---
+++
@@ -13,11 +13,11 @@
####################################################
# Request the types of data available from a given buoy.
-data_aval = NDBC.buoy_data_types('41002')
+data_aval = NDBC.buoy_data_types('42002')
print(data_aval)
####################################################
# Get a pandas data frame of all of the observations, meteorological data is the default
# observation set to query.
-df = NDBC.realtime_observations('41002', data_type='supl')
+df = NDBC.realtime_observations('42002', data_type='supl')
df.head()
|
9ce2faa950086f95f5a9c1f3e4f22e0a52622d8f
|
luminoso_api/wrappers/account.py
|
luminoso_api/wrappers/account.py
|
from .base import BaseWrapper
from .database import Database
from ..constants import URL_BASE
class Account(BaseWrapper):
"""An object encapsulating a billing account on Luminoso's servers"""
def __init__(self, acct_name, session):
"""Construct a wrapper around a particular account name
NOTE: Construction does not validate the existence or accessibility
of the account"""
super(Account, self).__init__(path=acct_name, session=session)
self.acct_name = acct_name
def __unicode__(self):
return u'Account("%s")' % self.acct_name
@classmethod
def accessible(cls, session):
accounts = session.get(URL_BASE + '/.accounts/').json
return [Account(acct, session) for acct in accounts['accounts']]
def databases(self):
db_table = self._get('/.list_dbs/')
dbs = {}
for db_name, db_meta in db_table.items():
path = self.api_path + '/' + db_meta['name']
dbs[db_name]=Database(path, db_name, meta=db_meta,
session=self._session)
return dbs
def create_project(self, db_name):
resp = self._post_raw('/%s/create_project/' % db_name)
if resp == 'Database %s created' % db_name:
return None
return resp
|
from .base import BaseWrapper
from .database import Database
from ..constants import URL_BASE
class Account(BaseWrapper):
"""An object encapsulating a billing account on Luminoso's servers"""
def __init__(self, acct_name, session):
"""Construct a wrapper around a particular account name
NOTE: Construction does not validate the existence or accessibility
of the account"""
super(Account, self).__init__(path=acct_name, session=session)
self.acct_name = acct_name
def __unicode__(self):
return u'Account("%s")' % self.acct_name
@classmethod
def accessible(cls, session):
accounts = session.get(URL_BASE + '/.accounts/').json
return [Account(acct, session) for acct in accounts['accounts']]
def databases(self):
db_table = self._get('/.list_dbs/')['result']
dbs = {}
for db_name, db_meta in db_table.items():
path = self.api_path + '/' + db_meta['name']
dbs[db_name]=Database(path, db_name, meta=db_meta,
session=self._session)
return dbs
def create_project(self, db_name):
resp = self._post_raw('/%s/create_project/' % db_name)
if resp == 'Database %s created' % db_name:
return None
return resp
|
Adjust Account.databases() to new return format
|
Adjust Account.databases() to new return format
|
Python
|
mit
|
LuminosoInsight/luminoso-api-client-python
|
---
+++
@@ -22,7 +22,7 @@
return [Account(acct, session) for acct in accounts['accounts']]
def databases(self):
- db_table = self._get('/.list_dbs/')
+ db_table = self._get('/.list_dbs/')['result']
dbs = {}
for db_name, db_meta in db_table.items():
path = self.api_path + '/' + db_meta['name']
|
ce889047cd06714c1da86daf787583e84a59956a
|
api/azure.py
|
api/azure.py
|
import os
from azure.storage import *
def store(image, entity, entity_id):
blob_service = BlobService(account_name='shnergledata',
account_key=os.environ['BLOB_KEY'])
myblob = image.read()
name = '/' + entity + '/' + entity_id
blob_service.put_blob('images', name, myblob, x_ms_blob_type='BlockBlob')
return True
def retrieve(entity, entity_id):
blob_service = BlobService(account_name='shnergledata',
account_key=os.environ['BLOB_KEY'])
name = '/' + entity + '/' + entity_id
blob = blob_service.get_blob('images', name)
return blob
|
import os
from azure.storage import BlobService
def store(image, entity, entity_id):
blob_service = BlobService(account_name='shnergledata',
account_key=os.environ['BLOB_KEY'])
myblob = image.read()
name = '/' + entity + '/' + entity_id
blob_service.put_blob('images', name, myblob, x_ms_blob_type='BlockBlob')
return True
def retrieve(entity, entity_id):
blob_service = BlobService(account_name='shnergledata',
account_key=os.environ['BLOB_KEY'])
name = '/' + entity + '/' + entity_id
blob = blob_service.get_blob('images', name)
return blob
|
Revert "The same thing, but their way haha"
|
Revert "The same thing, but their way haha"
This reverts commit ab12f6cc4593e81bc426a54e8aebc1671ac34e2a.
|
Python
|
mit
|
shnergle/ShnergleServer
|
---
+++
@@ -1,6 +1,6 @@
import os
-from azure.storage import *
+from azure.storage import BlobService
def store(image, entity, entity_id):
|
8898f23a429112cd80e6a2c8321b0de44aeaee7e
|
blanc_basic_pages/forms.py
|
blanc_basic_pages/forms.py
|
from django import forms
from django.conf import settings
from mptt.forms import MPTTAdminForm
from .models import Page
TEMPLATE_CHOICES = getattr(settings, 'PAGE_TEMPLATES', (
('', 'Default'),
))
class PageAdminForm(MPTTAdminForm):
class Meta:
model = Page
exclude = ()
def __init__(self, *args, **kwargs):
super(PageAdminForm, self).__init__(*args, **kwargs)
# The list of templates is defined in settings, however as we can't have dynamic choices in
# models due to migrations - we change the form choices instead.
self.fields['template_name'] = forms.ChoiceField(choices=TEMPLATE_CHOICES, required=False)
|
from django import forms
from django.conf import settings
from mptt.forms import MPTTAdminForm
from .models import Page
TEMPLATE_CHOICES = getattr(settings, 'PAGE_TEMPLATES', (
('', 'Default'),
))
class PageAdminForm(MPTTAdminForm):
class Meta:
model = Page
exclude = ()
widgets = {
# The list of templates is defined in settings, however as we can't have dynamic
# choices in models due to migrations - we change the form choices instead.
'template_name': forms.widgets.Select(choices=TEMPLATE_CHOICES),
}
|
Use custom widget for template choices instead
|
Use custom widget for template choices instead
A bit more Djangonic than tweaking self.fields
|
Python
|
bsd-3-clause
|
blancltd/blanc-basic-pages
|
---
+++
@@ -15,10 +15,8 @@
class Meta:
model = Page
exclude = ()
-
- def __init__(self, *args, **kwargs):
- super(PageAdminForm, self).__init__(*args, **kwargs)
-
- # The list of templates is defined in settings, however as we can't have dynamic choices in
- # models due to migrations - we change the form choices instead.
- self.fields['template_name'] = forms.ChoiceField(choices=TEMPLATE_CHOICES, required=False)
+ widgets = {
+ # The list of templates is defined in settings, however as we can't have dynamic
+ # choices in models due to migrations - we change the form choices instead.
+ 'template_name': forms.widgets.Select(choices=TEMPLATE_CHOICES),
+ }
|
408297aed41966b22d8486c559716b33bc2e2984
|
bitbots_body_behavior/src/bitbots_body_behavior/decisions/ball_close.py
|
bitbots_body_behavior/src/bitbots_body_behavior/decisions/ball_close.py
|
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = parameters.get("distance", self.blackboard.config['ball_close_distance'])
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
|
import math
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
class BallClose(AbstractDecisionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = parameters.get("distance", self.blackboard.config['ball_close_distance'])
self.ball_close_angle = parameters.get("angle", math.pi)
def perform(self, reevaluate=False):
"""
Determines whether the ball is in close range to the robot. The distance threshold is set in the config file.
:param reevaluate:
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
self.publish_debug_data("ball_angle", self.blackboard.world_model.get_ball_angle())
if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance and \
abs(self.blackboard.world_model.get_ball_angle()) < self.ball_close_angle:
return 'YES'
return 'NO'
def get_reevaluate(self):
return True
|
Add angles to ball close decision
|
Add angles to ball close decision
|
Python
|
bsd-3-clause
|
bit-bots/bitbots_behaviour
|
---
+++
@@ -1,3 +1,4 @@
+import math
from dynamic_stack_decider.abstract_decision_element import AbstractDecisionElement
@@ -5,6 +6,7 @@
def __init__(self, blackboard, dsd, parameters=None):
super(BallClose, self).__init__(blackboard, dsd, parameters)
self.ball_close_distance = parameters.get("distance", self.blackboard.config['ball_close_distance'])
+ self.ball_close_angle = parameters.get("angle", math.pi)
def perform(self, reevaluate=False):
"""
@@ -13,8 +15,10 @@
:return:
"""
self.publish_debug_data("ball_distance", self.blackboard.world_model.get_ball_distance())
+ self.publish_debug_data("ball_angle", self.blackboard.world_model.get_ball_angle())
- if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance:
+ if self.blackboard.world_model.get_ball_distance() < self.ball_close_distance and \
+ abs(self.blackboard.world_model.get_ball_angle()) < self.ball_close_angle:
return 'YES'
return 'NO'
|
c24dc7db961b03c947a98454fc3e8655c5f938ff
|
functional_tests/test_all_users.py
|
functional_tests/test_all_users.py
|
from datetime import date
from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.utils import formats
from selenium import webdriver
class HomeNewVisitorTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def get_full_url(self, namespace):
return "{0}{1}".format(self.live_server_url, reverse(namespace))
def test_home_title(self):
self.browser.get(self.get_full_url("home"))
self.assertIn("Alert", self.browser.title)
def test_h1_css(self):
self.browser.get(self.get_full_url("home"))
h1 = self.browser.find_element_by_tag_name("h1")
self.assertIn(h1.value_of_css_property(
"color"), "rgba(200, 50, 255, 1)")
def test_home_files(self):
self.browser.get(self.live_server_url + "/robots.txt")
self.assertNotIn("Not Found", self.browser.title)
self.browser.get(self.live_server_url + "/humans.txt")
self.assertNotIn("Not Found", self.browser.title)
def test_localization(self):
today = date.today()
self.browser.get(self.get_full_url("home"))
local_date = self.browser.find_element_by_id("local-date")
self.assertEqual(formats.date_format(
today, use_l10n=True), local_date.text)
|
from datetime import date
from django.core.urlresolvers import reverse
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.utils import formats
from selenium import webdriver
class HomeNewVisitorTest(StaticLiveServerTestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def get_full_url(self, namespace):
return "{0}{1}".format(self.live_server_url, reverse(namespace))
def test_home_title(self):
self.browser.get(self.get_full_url("home"))
self.assertIn("Alert", self.browser.title)
def test_h2_css(self):
self.browser.get(self.get_full_url("home"))
h2 = self.browser.find_element_by_tag_name("h2")
self.assertIn(h2.value_of_css_property(
"color"), "rgba(0, 0, 0, 1)")
def test_home_files(self):
self.browser.get(self.live_server_url + "/robots.txt")
self.assertNotIn("Not Found", self.browser.title)
self.browser.get(self.live_server_url + "/humans.txt")
self.assertNotIn("Not Found", self.browser.title)
|
Fix css and heading test also removed localization test as no longer required
|
Fix css and heading test also removed localization test as no longer required
|
Python
|
mit
|
iAmMrinal0/django_moviealert,iAmMrinal0/django_moviealert,iAmMrinal0/django_moviealert
|
---
+++
@@ -21,21 +21,14 @@
self.browser.get(self.get_full_url("home"))
self.assertIn("Alert", self.browser.title)
- def test_h1_css(self):
+ def test_h2_css(self):
self.browser.get(self.get_full_url("home"))
- h1 = self.browser.find_element_by_tag_name("h1")
- self.assertIn(h1.value_of_css_property(
- "color"), "rgba(200, 50, 255, 1)")
+ h2 = self.browser.find_element_by_tag_name("h2")
+ self.assertIn(h2.value_of_css_property(
+ "color"), "rgba(0, 0, 0, 1)")
def test_home_files(self):
self.browser.get(self.live_server_url + "/robots.txt")
self.assertNotIn("Not Found", self.browser.title)
self.browser.get(self.live_server_url + "/humans.txt")
self.assertNotIn("Not Found", self.browser.title)
-
- def test_localization(self):
- today = date.today()
- self.browser.get(self.get_full_url("home"))
- local_date = self.browser.find_element_by_id("local-date")
- self.assertEqual(formats.date_format(
- today, use_l10n=True), local_date.text)
|
de6a32e4b9a94103c923188894da6455ca14956c
|
TopTenView.py
|
TopTenView.py
|
# coding: utf-8
# ui.View subclass for the top ten iTunes songs.
# Pull requests gladly accepted.
import feedparser, requests, ui
url = 'https://itunes.apple.com/us/rss/topsongs/limit=10/xml'
def get_image_urls(itunes_url):
for entry in feedparser.parse(itunes_url).entries:
yield entry['summary'].partition('src="')[2].partition('"')[0]
class TopTenView(ui.View):
def __init__(self, image_urls):
self.present()
for i, url in enumerate(image_urls):
button = ui.Button()
button.background_image = ui.Image.from_data(requests.get(url).content)
button.border_width = 2
button.x = (i % 5) * 128 + 10
button.y = (i / 5) * 128 + 10
button.width = button.height = 128
self.add_subview(button)
TopTenView(list(get_image_urls(url)))
|
# coding: utf-8
# ui.View subclass for the top ten iTunes songs.
# Pull requests gladly accepted.
import feedparser, requests, ui
url = 'https://itunes.apple.com/us/rss/topsongs/limit=10/xml'
def get_image_urls(itunes_url):
for entry in feedparser.parse(itunes_url).entries:
yield entry['summary'].partition('src="')[2].partition('"')[0]
class TopTenView(ui.View):
def __init__(self, image_urls):
self.present()
for i, url in enumerate(image_urls):
button = ui.Button()
button.background_image = ui.Image.from_data(requests.get(url).content)
w, h = button.background_image.size
button.x = i % 5 * w
button.y = i / 5 * h
button.width, button.height = w, h
button.border_width = 2
self.add_subview(button)
TopTenView(list(get_image_urls(url)))
|
Move to buttons & remove hardcoding of image size
|
Move to buttons & remove hardcoding of image size
|
Python
|
apache-2.0
|
cclauss/Pythonista_ui
|
---
+++
@@ -17,10 +17,11 @@
for i, url in enumerate(image_urls):
button = ui.Button()
button.background_image = ui.Image.from_data(requests.get(url).content)
+ w, h = button.background_image.size
+ button.x = i % 5 * w
+ button.y = i / 5 * h
+ button.width, button.height = w, h
button.border_width = 2
- button.x = (i % 5) * 128 + 10
- button.y = (i / 5) * 128 + 10
- button.width = button.height = 128
self.add_subview(button)
TopTenView(list(get_image_urls(url)))
|
ceeb64c9e46a74f95178be88566fba3d7f080fa1
|
mica/stats/tests/test_acq_stats.py
|
mica/stats/tests/test_acq_stats.py
|
from .. import acq_stats
def test_calc_stats():
acq_stats.calc_stats(17210)
|
import tempfile
import os
from .. import acq_stats
def test_calc_stats():
acq_stats.calc_stats(17210)
def test_make_acq_stats():
"""
Save the acq stats for one obsid into a newly-created table
"""
# Get a temporary file, but then delete it, because _save_acq_stats will only
# make a new table if the supplied file doesn't exist
fh, fn = tempfile.mkstemp(suffix='.h5')
os.unlink(fn)
acq_stats.table_file = fn
obsid = 20001
obsid_info, acq, star_info, catalog, temp = acq_stats.calc_stats(obsid)
t = acq_stats.table_acq_stats(obsid_info, acq, star_info, catalog, temp)
acq_stats._save_acq_stats(t)
os.unlink(fn)
|
Add a test that makes a new acq stats database
|
Add a test that makes a new acq stats database
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
---
+++
@@ -1,5 +1,24 @@
+import tempfile
+import os
+
from .. import acq_stats
def test_calc_stats():
acq_stats.calc_stats(17210)
+
+
+def test_make_acq_stats():
+ """
+ Save the acq stats for one obsid into a newly-created table
+ """
+ # Get a temporary file, but then delete it, because _save_acq_stats will only
+ # make a new table if the supplied file doesn't exist
+ fh, fn = tempfile.mkstemp(suffix='.h5')
+ os.unlink(fn)
+ acq_stats.table_file = fn
+ obsid = 20001
+ obsid_info, acq, star_info, catalog, temp = acq_stats.calc_stats(obsid)
+ t = acq_stats.table_acq_stats(obsid_info, acq, star_info, catalog, temp)
+ acq_stats._save_acq_stats(t)
+ os.unlink(fn)
|
dd0d047829e65e613e4d2e9ccd9a6411fa9e301f
|
test.py
|
test.py
|
from interruptingcow import timeout
import time
while True:
# simulate waiting for laser A
print("Waiting for laser A")
time.sleep(2)
print("laser A tripped")
try:
with timeout(.25, exception=RuntimeError):
# perform a potentially very slow operation
print("Wait for laser B")
time.sleep(2)
print("laser B tripped, add 1 to DB")
pass
except RuntimeError:
print "didn't finish within .25 seconds"
|
from interruptingcow import timeout
import time
# def bTripped():
# +1 to DB
# continue
while True:
# simulate waiting for laser A
print("Waiting for laser A")
time.sleep(2)
print("laser A tripped")
# Real code would be:
# laserA.wait_for_dark()
try:
with timeout(.25, exception=RuntimeError):
# perform a potentially very slow operation
print("Wait for laser B")
time.sleep(2)
print("laser B tripped, add 1 to DB")
# Real code would be:
# laserB.when_dark() = bTripped
pass
except RuntimeError:
print "didn't finish within .25 seconds"
|
Add real code in comments.
|
Add real code in comments.
|
Python
|
apache-2.0
|
pjcoleman73/UVaLibRoomCount
|
---
+++
@@ -1,17 +1,25 @@
from interruptingcow import timeout
import time
+
+# def bTripped():
+# +1 to DB
+# continue
while True:
# simulate waiting for laser A
print("Waiting for laser A")
time.sleep(2)
print("laser A tripped")
+ # Real code would be:
+ # laserA.wait_for_dark()
try:
with timeout(.25, exception=RuntimeError):
# perform a potentially very slow operation
print("Wait for laser B")
time.sleep(2)
print("laser B tripped, add 1 to DB")
+ # Real code would be:
+ # laserB.when_dark() = bTripped
pass
except RuntimeError:
print "didn't finish within .25 seconds"
|
8c8307ff5313b1f6c69d976853f763daf2aece0c
|
test.py
|
test.py
|
""" Functions to call the api and test it """
import sys
import fenix
api = fenix.FenixAPISingleton()
print('Testing Fenix API SDK Python')
auth_url = api.get_authentication_url()
print(auth_url)
api.set_code(sys.argv[1])
print('Access token: ' + api.get_access_token())
print('Refresh token: ' + api.get_refresh_token())
api._refresh_access_token()
print('New access token: ' + api.get_access_token())
print(api.get_space('2465311230082'))
|
""" Functions to call the api and test it """
import sys
import fenix
api = fenix.FenixAPISingleton()
print('Testing Fenix API SDK Python')
auth_url = api.get_authentication_url()
print(api.get_space('2465311230082'))
print(auth_url)
api.set_code(sys.argv[1])
print('Access token: ' + api.get_access_token())
print('Refresh token: ' + api.get_refresh_token())
api._refresh_access_token()
print('New access token: ' + api.get_access_token())
|
Test now calls a public endpoint first
|
Test now calls a public endpoint first
|
Python
|
mit
|
samfcmc/fenixedu-python-sdk
|
---
+++
@@ -6,6 +6,7 @@
api = fenix.FenixAPISingleton()
print('Testing Fenix API SDK Python')
auth_url = api.get_authentication_url()
+print(api.get_space('2465311230082'))
print(auth_url)
api.set_code(sys.argv[1])
print('Access token: ' + api.get_access_token())
@@ -13,4 +14,3 @@
api._refresh_access_token()
print('New access token: ' + api.get_access_token())
-print(api.get_space('2465311230082'))
|
f74d57f4a05fa56b8668e371159affe37f4c38c3
|
opentreemap/otm_comments/models.py
|
opentreemap/otm_comments/models.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
from treemap.instance import Instance
class EnhancedThreadedComment(ThreadedComment):
"""
This class wraps the ThreadedComment model with moderation specific fields
"""
# If the comment should be hidden in the default filter view for moderation
is_archived = models.BooleanField(default=False)
# We could retrieve this through the GenericForeignKey on ThreadedComment,
# but it makes things simpler to record instance here.
instance = models.ForeignKey(Instance)
def save(self, *args, **kwargs):
if hasattr(self.content_object, 'instance'):
self.instance = self.content_object.instance
super(EnhancedThreadedComment, self).save(*args, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
class EnhancedThreadedComment(ThreadedComment):
"""
This class wraps the ThreadedComment model with moderation specific fields
"""
# If the comment should be hidden in the default filter view for moderation
is_archived = models.BooleanField(default=False)
# We could retrieve this through the GenericForeignKey on ThreadedComment,
# but it makes things simpler to record instance here.
instance = models.ForeignKey('treemap.Instance')
def save(self, *args, **kwargs):
if hasattr(self.content_object, 'instance'):
self.instance = self.content_object.instance
super(EnhancedThreadedComment, self).save(*args, **kwargs)
|
Fix circular dependency problem with django apps
|
Fix circular dependency problem with django apps
It looks like translation is importing *all other* django apps in the project
when it is used from treemap. This means that it will load apps that
depend on treemap when it is not finished import treemap. So while it
appears that treemap/otm1_migrator/otm_comments have sane, non-circular
dependencies on each other, the translation app is causing the circle.
I'm pretty sure this is actually a django pattern. Django enjoys
including dynamic apps that walk through installed apps and do magic
stuff. To compensate, they provide this alternative, string-based import
strategy that dynamic apps adhere to.
|
Python
|
agpl-3.0
|
recklessromeo/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,recklessromeo/otm-core,maurizi/otm-core,RickMohr/otm-core,RickMohr/otm-core,recklessromeo/otm-core,RickMohr/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core
|
---
+++
@@ -7,8 +7,6 @@
from threadedcomments.models import ThreadedComment
from django.contrib.gis.db import models
-
-from treemap.instance import Instance
class EnhancedThreadedComment(ThreadedComment):
@@ -21,7 +19,7 @@
# We could retrieve this through the GenericForeignKey on ThreadedComment,
# but it makes things simpler to record instance here.
- instance = models.ForeignKey(Instance)
+ instance = models.ForeignKey('treemap.Instance')
def save(self, *args, **kwargs):
if hasattr(self.content_object, 'instance'):
|
905e9e7744d8d5d84978a913b2e641a9c4337912
|
Do_not_deploy/query_outgoing_queue.py
|
Do_not_deploy/query_outgoing_queue.py
|
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message"
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == 'no message':
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
|
from kombu import Connection, Exchange, Queue
from flask import Flask
import os
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
@app.route("/getnextqueuemessage")
#Gets the next message from target queue. Returns the signed JSON.
def get_last_queue_message():
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange()
connection = Connection(app.config['OUTGOING_QUEUE_HOSTNAME'])
# Create/access a queue bound to the connection.
queue = Queue(app.config['OUTGOING_QUEUE'], exchange, routing_key='#')(connection)
queue.declare()
message = queue.get()
if message:
signature = message.body
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
return "no message", 404
@app.route("/removeallmessages")
#Gets the next message from target queue. Returns the signed JSON.
def remove_all_messages():
while True:
queue_message = get_last_queue_message()
if queue_message == 'no message':
break
return "done", 202
@app.route("/")
def check_status():
return "Everything is OK"
|
Return 404 if no message so we can poll from cucumber tests
|
Return 404 if no message so we can poll from cucumber tests
|
Python
|
mit
|
LandRegistry/register-publisher,LandRegistry/register-publisher
|
---
+++
@@ -24,7 +24,7 @@
message.ack() #acknowledges message, ensuring its removal.
return signature
else:
- return "no message"
+ return "no message", 404
|
e3c660cc4b5e72af3f6155c2426555247a4699b5
|
tests/test_ultrametric.py
|
tests/test_ultrametric.py
|
from viridis import tree
from six.moves import range
def test_split():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1)
t.merge(6, 2, 0.2)
t.merge(3, 4, 0.3)
t.merge(8, 5, 0.4)
t.merge(7, 8, 0.5)
t.split(0, 2)
assert t.node[9]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert t.node[9]['num_leaves'] == 3
|
from viridis import tree
from six.moves import range
import pytest
@pytest.fixture
def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1)
t.merge(6, 2, 0.2)
t.merge(3, 4, 0.3)
t.merge(8, 5, 0.4)
t.merge(7, 8, 0.5)
return t
def test_split(base_tree):
t = base_tree
t.split(0, 2)
assert t.node[9]['num_leaves'] == 3
t.split(0, 4) # nothing to do
assert t.node[9]['num_leaves'] == 3
|
Update test to use pytest fixture
|
Update test to use pytest fixture
|
Python
|
mit
|
jni/viridis
|
---
+++
@@ -1,13 +1,21 @@
from viridis import tree
from six.moves import range
-def test_split():
+import pytest
+
+@pytest.fixture
+def base_tree():
t = tree.Ultrametric(list(range(6)))
t.merge(0, 1, 0.1)
t.merge(6, 2, 0.2)
t.merge(3, 4, 0.3)
t.merge(8, 5, 0.4)
t.merge(7, 8, 0.5)
+ return t
+
+
+def test_split(base_tree):
+ t = base_tree
t.split(0, 2)
assert t.node[9]['num_leaves'] == 3
t.split(0, 4) # nothing to do
|
7aa68e0f7c553a964725ddf63c8de44eff3b3f10
|
lib/log_processor.py
|
lib/log_processor.py
|
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name']):
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
|
import re
import snmpy
class log_processor(snmpy.plugin):
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
'count': re.compile(v['count']),
'reset': re.compile(v['reset']) if 'reset' in v else None,
'start': int(v['start']) if 'start' in v else 0,
'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
self.data['2.%s' % k] = 'integer', extra['start'], extra
self.tail()
@snmpy.plugin.task
def tail(self):
for line in snmpy.plugin.tail(self.conf['file_name'], True):
if line is True:
for item in self.data['2.0':]:
if self.data[item:'rotate'] and line is True:
self.data[item] = self.data[item:'start']
continue
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
self.data[item] = self.data[item:True] + (int(count.group(1)) if len(count.groups()) > 0 else 1)
break
if self.data[item:'reset'] is not None and self.data[item:'reset'].search(line):
self.data[item] = self.data[item:'start']
break
|
Add reset on rotation support in log processor.
|
Add reset on rotation support in log processor.
|
Python
|
mit
|
mk23/snmpy,mk23/snmpy
|
---
+++
@@ -5,9 +5,10 @@
def create(self):
for k, v in sorted(self.conf['objects'].items()):
extra = {
- 'count': re.compile(v['count']),
- 'reset': re.compile(v['reset']) if 'reset' in v else None,
- 'start': int(v['start']) if 'start' in v else 0,
+ 'count': re.compile(v['count']),
+ 'reset': re.compile(v['reset']) if 'reset' in v else None,
+ 'start': int(v['start']) if 'start' in v else 0,
+ 'rotate': bool(v['rotate']) if 'rotate' in v else False
}
self.data['1.%s' % k] = 'string', v['label']
@@ -17,7 +18,13 @@
@snmpy.plugin.task
def tail(self):
- for line in snmpy.plugin.tail(self.conf['file_name']):
+ for line in snmpy.plugin.tail(self.conf['file_name'], True):
+ if line is True:
+ for item in self.data['2.0':]:
+ if self.data[item:'rotate'] and line is True:
+ self.data[item] = self.data[item:'start']
+ continue
+
for item in self.data['2.0':]:
count = self.data[item:'count'].search(line)
if count:
|
d0edc9ef44555b35d9ae0d84e9b109b0d5705bdf
|
callisto_core/tests/reporting/test_pdf.py
|
callisto_core/tests/reporting/test_pdf.py
|
from io import BytesIO
import PyPDF2
from wizard_builder import view_helpers as wizard_builer_helpers
from .. import test_base
from ...reporting import report_delivery
# TODO: generate mock_report_data in wizard builder
mock_report_data = [
{'food options': ['vegetables', 'apples: red']},
{'do androids dream of electric sheep?': ['awdad']},
{'whats on the radios?': ['guitar']},
]
class ReportPDFTest(test_base.ReportFlowHelper):
def test_report_pdf(self):
self.client_post_report_creation()
pdf = report_delivery.report_as_pdf(
report=self.report,
data=mock_report_data,
recipient=None,
)
pdf_reader = PyPDF2.PdfFileReader(BytesIO(pdf))
self.assertIn(
"Reported by: testing_12",
pdf_reader.getPage(0).extractText())
self.assertIn('food options', pdf_reader.getPage(0).extractText())
self.assertIn('vegetables', pdf_reader.getPage(0).extractText())
self.assertIn('apples: red', pdf_reader.getPage(0).extractText())
self.assertIn(
wizard_builer_helpers.SerializedDataHelper.not_answered_text,
pdf_reader.getPage(0).extractText(),
)
|
from io import BytesIO
import PyPDF2
from .. import test_base
from ...reporting import report_delivery
# TODO: generate mock_report_data in wizard builder
mock_report_data = [
{'food options': ['vegetables', 'apples: red']},
{'eat it now???': ['catte']},
{'do androids dream of electric sheep?': ['awdad']},
{'whats on the radios?': ['guitar']},
]
class ReportPDFTest(test_base.ReportFlowHelper):
def test_report_pdf(self):
self.client_post_report_creation()
pdf = report_delivery.report_as_pdf(
report=self.report,
data=mock_report_data,
recipient=None,
)
pdf_reader = PyPDF2.PdfFileReader(BytesIO(pdf))
self.assertIn(
"Reported by: testing_12",
pdf_reader.getPage(0).extractText())
self.assertIn('food options', pdf_reader.getPage(0).extractText())
self.assertIn('vegetables', pdf_reader.getPage(0).extractText())
self.assertIn('apples: red', pdf_reader.getPage(0).extractText())
self.assertIn('eat it now???', pdf_reader.getPage(0).extractText())
|
Revert "test not answered text present in report"
|
Revert "test not answered text present in report"
This reverts commit 7e6caad91969bafdaefa0fc60fdfe25a8d8d8ab2.
|
Python
|
agpl-3.0
|
project-callisto/callisto-core,project-callisto/callisto-core,SexualHealthInnovations/callisto-core,SexualHealthInnovations/callisto-core
|
---
+++
@@ -1,8 +1,6 @@
from io import BytesIO
import PyPDF2
-
-from wizard_builder import view_helpers as wizard_builer_helpers
from .. import test_base
from ...reporting import report_delivery
@@ -10,6 +8,7 @@
# TODO: generate mock_report_data in wizard builder
mock_report_data = [
{'food options': ['vegetables', 'apples: red']},
+ {'eat it now???': ['catte']},
{'do androids dream of electric sheep?': ['awdad']},
{'whats on the radios?': ['guitar']},
]
@@ -32,7 +31,4 @@
self.assertIn('food options', pdf_reader.getPage(0).extractText())
self.assertIn('vegetables', pdf_reader.getPage(0).extractText())
self.assertIn('apples: red', pdf_reader.getPage(0).extractText())
- self.assertIn(
- wizard_builer_helpers.SerializedDataHelper.not_answered_text,
- pdf_reader.getPage(0).extractText(),
- )
+ self.assertIn('eat it now???', pdf_reader.getPage(0).extractText())
|
af5ff98f150158e4f2e0bd2281229a6248a8fb52
|
cdc/models.py
|
cdc/models.py
|
from django.db import models
from django.contrib.auth.models import User
class SiteUser(models.Model):
def __str__(self):
return self.user.username
# Using a OneToOneField so we can add the extra 'company' parameter to the user
# without extending or replacing Django's User model
user = models.OneToOneField(User)
company = models.CharField(default='', max_length=100)
class LoginSession(models.Model):
def __str__(self):
return self.token
token = models.CharField(default='', max_length=64)
user = models.CharField(default='', max_length=100)
class Testimonial(models.Model):
def __str__(self):
return self.postedby
text = models.TextField(default='', max_length=1000)
postedby = models.CharField(default='', max_length=1000)
email = models.CharField(default='', max_length=100)
|
from django.db import models
from django.contrib.auth.models import User
class SiteUser(models.Model):
def __str__(self):
return self.company + " | " + self.user.username
# Using a OneToOneField so we can add the extra 'company' parameter to the user
# without extending or replacing Django's User model
user = models.OneToOneField(User)
company = models.CharField(default='', max_length=100)
class LoginSession(models.Model):
def __str__(self):
return self.token
token = models.CharField(default='', max_length=64)
user = models.CharField(default='', max_length=100)
class Testimonial(models.Model):
def __str__(self):
return self.postedby
text = models.TextField(default='', max_length=1000)
postedby = models.CharField(default='', max_length=1000)
email = models.CharField(default='', max_length=100)
|
Change siteuser string for clarity
|
Change siteuser string for clarity
|
Python
|
mit
|
mgerst/cdc2-2015-www,ISEAGE-ISU/cdc2-2015-www,mg1065/cdc2-2015-www,mgerst/cdc2-2015-www,keaneokelley/Crippling-Debt-Corporation,keaneokelley/Crippling-Debt-Corporation,ISEAGE-ISU/cdc2-2015-www,mg1065/cdc2-2015-www
|
---
+++
@@ -3,7 +3,7 @@
class SiteUser(models.Model):
def __str__(self):
- return self.user.username
+ return self.company + " | " + self.user.username
# Using a OneToOneField so we can add the extra 'company' parameter to the user
# without extending or replacing Django's User model
user = models.OneToOneField(User)
|
782a4b028c45d3cc37e6679ccc3d482f0518b4b7
|
txircd/modules/cmode_t.py
|
txircd/modules/cmode_t.py
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
from twisted.words.protocols import irc
from txircd.modbase import Mode
class TopiclockMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "TOPIC":
return data
if "topic" not in data:
return data
targetChannel = data["targetchan"]
if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "channel_minimum_level" not in self.ircd.servconfig:
self.ircd.servconfig["channel_minimum_level"] = {}
if "TOPIC" not in self.ircd.servconfig["channel_minimum_level"]:
self.ircd.servconfig["channel_minimum_level"]["TOPIC"] = "o"
return {
"modes": {
"cnt": TopiclockMode()
}
}
def cleanup(self):
self.ircd.removeMode("cnt")
|
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
|
Fix the order of parameters to hasAccess, which broke all topic changing when +t was set
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd
|
---
+++
@@ -8,7 +8,7 @@
if "topic" not in data:
return data
targetChannel = data["targetchan"]
- if "t" in targetChannel.mode and not user.hasAccess(self.ircd.servconfig["channel_minimum_level"]["TOPIC"], targetChannel.name):
+ if "t" in targetChannel.mode and not user.hasAccess(targetChannel.name, self.ircd.servconfig["channel_minimum_level"]["TOPIC"]):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, targetChannel.name, ":You do not have access to change the topic on this channel")
return {}
return data
|
8ad7b5546e0afd25b55411851feee61e5377a71d
|
PrinterApplication.py
|
PrinterApplication.py
|
from Cura.Wx.WxApplication import WxApplication
class PrinterApplication(WxApplication):
def __init__(self):
super(PrinterApplication, self).__init__()
def run(self):
super(PrinterApplication, self).run()
|
from Cura.Wx.WxApplication import WxApplication
from Cura.Wx.MainWindow import MainWindow
class PrinterApplication(WxApplication):
def __init__(self):
super(PrinterApplication, self).__init__()
def run(self):
window = MainWindow("Cura Printer")
window.Show()
super(PrinterApplication, self).run()
|
Add a MainWindow class to Wx and use it in printer and scanner
|
Add a MainWindow class to Wx and use it in printer and scanner
|
Python
|
agpl-3.0
|
quillford/Cura,Curahelper/Cura,senttech/Cura,bq/Ultimaker-Cura,fxtentacle/Cura,hmflash/Cura,fieldOfView/Cura,derekhe/Cura,lo0ol/Ultimaker-Cura,totalretribution/Cura,DeskboxBrazil/Cura,totalretribution/Cura,derekhe/Cura,DeskboxBrazil/Cura,quillford/Cura,ynotstartups/Wanhao,hmflash/Cura,Curahelper/Cura,ad1217/Cura,fxtentacle/Cura,fieldOfView/Cura,bq/Ultimaker-Cura,markwal/Cura,ynotstartups/Wanhao,ad1217/Cura,lo0ol/Ultimaker-Cura,markwal/Cura,senttech/Cura
|
---
+++
@@ -1,8 +1,11 @@
from Cura.Wx.WxApplication import WxApplication
+from Cura.Wx.MainWindow import MainWindow
class PrinterApplication(WxApplication):
def __init__(self):
super(PrinterApplication, self).__init__()
def run(self):
+ window = MainWindow("Cura Printer")
+ window.Show()
super(PrinterApplication, self).run()
|
5d5c944533d70c0c9c3385f3417b06d3d3060594
|
MROCPdjangoForm/ocpipeline/mrpaths.py
|
MROCPdjangoForm/ocpipeline/mrpaths.py
|
#
# Code to load project paths
#
import os, sys
MR_BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "/Users/dmhembere44/MR-connectome" ))
MR_CMAPPER_PATH = os.path.join(MR_BASE_PATH, "cmapper" )
MR_MRCAP_PATH = os.path.join(MR_BASE_PATH, "mrcap" )
sys.path += [ MR_BASE_PATH, MR_CMAPPER_PATH, MR_MRCAP_PATH ]
|
#
# Code to load project paths
#
import os, sys
MR_BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.." ))
MR_CMAPPER_PATH = os.path.join(MR_BASE_PATH, "cmapper" )
MR_MRCAP_PATH = os.path.join(MR_BASE_PATH, "mrcap" )
sys.path += [ MR_BASE_PATH, MR_CMAPPER_PATH, MR_MRCAP_PATH ]
|
Change to path, made relative
|
Change to path, made relative
Former-commit-id: f00bf782fad3f6ddc6d2c97a23ff4f087ad3a22f
|
Python
|
apache-2.0
|
openconnectome/m2g,openconnectome/m2g,neurodata/ndgrutedb,openconnectome/m2g,neurodata/ndgrutedb,openconnectome/m2g,neurodata/ndgrutedb,neurodata/ndmg,openconnectome/m2g,neurodata/ndgrutedb,neurodata/ndgrutedb,neurodata/ndgrutedb,openconnectome/m2g,openconnectome/m2g,neurodata/ndgrutedb,neurodata/ndgrutedb,openconnectome/m2g
|
---
+++
@@ -4,7 +4,7 @@
import os, sys
-MR_BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "/Users/dmhembere44/MR-connectome" ))
+MR_BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.." ))
MR_CMAPPER_PATH = os.path.join(MR_BASE_PATH, "cmapper" )
MR_MRCAP_PATH = os.path.join(MR_BASE_PATH, "mrcap" )
|
91519c542b2fac085dc6b785a41d2fbdba91386c
|
business_requirement_deliverable_report/__openerp__.py
|
business_requirement_deliverable_report/__openerp__.py
|
# -*- coding: utf-8 -*-
# © 2016 Elico Corp (https://www.elico-corp.com).
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Business Requirement Document Printout',
'summary': 'Print the Business Requirement Document for your customers',
'version': '8.0.5.0.1',
'category': 'Business Requirements Management',
'website': 'https://www.elico-corp.com',
'author': 'Elico Corp',
'license': 'AGPL-3',
'application': False,
'installable': True,
'depends': [
'business_requirement_deliverable',
],
'data': [
'views/report_business_requirement.xml',
'views/report_business_requirement_deliverable.xml',
'views/report_business_requirement_deliverable_resource.xml',
'report/report.xml'
],
'image': [
'static/img/bus_req_report1.png',
'static/img/bus_req_report2.png',
'static/img/bus_req_report3.png',
],
}
|
# -*- coding: utf-8 -*-
# © 2016 Elico Corp (https://www.elico-corp.com).
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Business Requirement Document Printout',
'summary': 'Print the Business Requirement Document for your customers',
'version': '8.0.5.0.1',
'category': 'Business Requirements Management',
'website': 'https://www.elico-corp.com',
'author': 'Elico Corp, Odoo Community Association (OCA)',
'license': 'AGPL-3',
'application': False,
'installable': True,
'depends': [
'business_requirement_deliverable',
],
'data': [
'views/report_business_requirement.xml',
'views/report_business_requirement_deliverable.xml',
'views/report_business_requirement_deliverable_resource.xml',
'report/report.xml'
],
'image': [
'static/img/bus_req_report1.png',
'static/img/bus_req_report2.png',
'static/img/bus_req_report3.png',
],
}
|
Fix manifest: add OCA in the authors
|
Fix manifest: add OCA in the authors
Added OCA in the authors
|
Python
|
agpl-3.0
|
YogeshMahera-SerpentCS/business-requirement,sudhir-serpentcs/business-requirement
|
---
+++
@@ -7,7 +7,7 @@
'version': '8.0.5.0.1',
'category': 'Business Requirements Management',
'website': 'https://www.elico-corp.com',
- 'author': 'Elico Corp',
+ 'author': 'Elico Corp, Odoo Community Association (OCA)',
'license': 'AGPL-3',
'application': False,
'installable': True,
|
23501afd09b13d1e5f33bdd60614fd9ac7210108
|
oratioignoreparser.py
|
oratioignoreparser.py
|
import os
import re
class OratioIgnoreParser():
def __init__(self):
self.ignored_paths = ["oratiomodule.tar.gz"]
def load(self, oratio_ignore_path):
with open(oratio_ignore_path, "r") as f:
self.ignored_paths.extend([line.strip() for line in f])
def should_be_ignored(self, filepath):
for ig in self.ignored_paths:
compiled_regex = re.compile(
'^' + re.escape(ig).replace('\\*', '.*') + '$'
)
if compiled_regex.search(filepath) or \
compiled_regex.search(filepath.split('/')[-1]):
return True
return False
def list_files(self, directory):
filepaths = []
ignored_files = []
for root, dirs, files in os.walk("."):
for name in files:
relative_path = os.path.join(root, name)
if relative_path.startswith("./"):
relative_path = relative_path[2:]
if not self.should_be_ignored(relative_path):
filepaths.append(relative_path)
else:
ignored_files.append(relative_path)
return filepaths, ignored_files
|
import os
import re
class OratioIgnoreParser():
def __init__(self):
self.ignored_paths = ["oratiomodule.tar.gz"]
def load(self, oratio_ignore_path):
with open(oratio_ignore_path, "r") as f:
self.ignored_paths.extend([line.strip() for line in f])
def extend_list(self, ignored_paths_list):
self.ignored_paths.extend(ignored_paths_list)
def should_be_ignored(self, filepath):
for ig in self.ignored_paths:
compiled_regex = re.compile(
'^' + re.escape(ig).replace('\\*', '.*') + '$'
)
if compiled_regex.search(filepath) or \
compiled_regex.search(filepath.split('/')[-1]):
return True
return False
def list_files(self, directory):
filepaths = []
ignored_files = []
for root, dirs, files in os.walk("."):
for name in files:
relative_path = os.path.join(root, name)
if relative_path.startswith("./"):
relative_path = relative_path[2:]
if not self.should_be_ignored(relative_path):
filepaths.append(relative_path)
else:
ignored_files.append(relative_path)
return filepaths, ignored_files
|
Add extend_list method to OratioIgnoreParser
|
Add extend_list method to OratioIgnoreParser
To make oratioignoreparser.py easily testable using unit tests.
|
Python
|
mit
|
oratio-io/oratio-cli,oratio-io/oratio-cli
|
---
+++
@@ -9,6 +9,9 @@
def load(self, oratio_ignore_path):
with open(oratio_ignore_path, "r") as f:
self.ignored_paths.extend([line.strip() for line in f])
+
+ def extend_list(self, ignored_paths_list):
+ self.ignored_paths.extend(ignored_paths_list)
def should_be_ignored(self, filepath):
for ig in self.ignored_paths:
|
efa5156a15d2fd945c406792065b3386aa61107e
|
package_deb_replace_version.py
|
package_deb_replace_version.py
|
import sys
def split(string, splitters):
final = [string]
for x in splitters:
for i,s in enumerate(final):
if x in s and x != s:
left, right = s.split(x, 1)
final[i] = left
final.insert(i + 1, x)
final.insert(i + 2, right)
return final
fullversion = sys.argv[1]
path = f"btsoot_{fullversion}/DEBIAN/control"
version = split(fullversion, "v")
version = version[1]
control_content = f"""Package: btsoot
Version: {version}
Section: base
Priority: optional
Architecture: i386
Depends: build-essential
Maintainer: Paul Kramme <pjkramme@gmail.com>
Description: BTSOOT
Folder redundancy offsite-backup utility.
"""
print("DEB PACKAGE VERSION REPLACER")
# yes, i wrote a tool for this...
with open(path, "a") as f:
f.write(control_content)
print("Done.")
|
import sys
fullversion = sys.argv[1]
path = f"btsoot_{fullversion}/DEBIAN/control"
version = fullversion[1:]
version = version[1]
control_content = f"""Package: btsoot
Version: {version}
Section: base
Priority: optional
Architecture: i386
Depends: build-essential
Maintainer: Paul Kramme <pjkramme@gmail.com>
Description: BTSOOT
Folder redundancy offsite-backup utility.
"""
print("DEB PACKAGE VERSION REPLACER")
# yes, i wrote a tool for this...
with open(path, "a") as f:
f.write(control_content)
print("Done.")
|
Remove unecessary code Add first charakter remover
|
Remove unecessary code
Add first charakter remover
|
Python
|
bsd-3-clause
|
paulkramme/btsoot
|
---
+++
@@ -1,19 +1,8 @@
import sys
-
-def split(string, splitters):
- final = [string]
- for x in splitters:
- for i,s in enumerate(final):
- if x in s and x != s:
- left, right = s.split(x, 1)
- final[i] = left
- final.insert(i + 1, x)
- final.insert(i + 2, right)
- return final
fullversion = sys.argv[1]
path = f"btsoot_{fullversion}/DEBIAN/control"
-version = split(fullversion, "v")
+version = fullversion[1:]
version = version[1]
control_content = f"""Package: btsoot
Version: {version}
|
874ed1f0d97794b56907cffef047d178696591a4
|
tensorflow_datasets/testing/nyu_depth_v2.py
|
tensorflow_datasets/testing/nyu_depth_v2.py
|
import os
import h5py
import numpy as np
ref = h5py.File(
os.path.expanduser(
"~/tensorflow_datasets/downloads/extracted/TAR_GZ.datasets.lids.mit.edu_fastdept_nyudepthBjtXYu6zBBYUv0ByLqXPgFy4ygUuVvPRxjz9Ip5_97M.tar.gz/nyudepthv2/val/official/00001.h5"
),
"r",
)
rgb = ref["rgb"][:]
depth = ref["depth"][:]
rgb_fake = np.ones(rgb.shape, dtype=np.uint8) # np.zeros for val
depth_fake = np.ones(depth.shape).astype(depth.dtype) # np.zeros for val
with h5py.File("00001.h5", "w") as f: # 00001 and 00002 for train; 00001 for val
f.create_dataset("rgb", data=rgb_fake, compression="gzip")
f.create_dataset("depth", data=depth_fake, compression="gzip")
|
import os
import h5py
import numpy as np
ref = h5py.File(
os.path.expanduser(
"~/tensorflow_datasets/downloads/extracted/TAR_GZ.datasets.lids.mit.edu_fastdept_nyudepthBjtXYu6zBBYUv0ByLqXPgFy4ygUuVvPRxjz9Ip5_97M.tar.gz/nyudepthv2/val/official/00001.h5"
),
"r",
)
rgb = ref["rgb"][:]
depth = ref["depth"][:]
rgb_fake = np.ones(rgb.shape, dtype=np.uint8) # np.zeros for val
depth_fake = np.ones(depth.shape).astype(depth.dtype) # np.zeros for val
with h5py.File("00001.h5", "w") as f: # 00001 and 00002 for train; 00001 for val
f.create_dataset("rgb", data=rgb_fake, compression="gzip")
f.create_dataset("depth", data=depth_fake, compression="gzip")
|
Format with two spaces instead of four
|
Format with two spaces instead of four
|
Python
|
apache-2.0
|
tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets
|
---
+++
@@ -16,5 +16,5 @@
depth_fake = np.ones(depth.shape).astype(depth.dtype) # np.zeros for val
with h5py.File("00001.h5", "w") as f: # 00001 and 00002 for train; 00001 for val
- f.create_dataset("rgb", data=rgb_fake, compression="gzip")
- f.create_dataset("depth", data=depth_fake, compression="gzip")
+ f.create_dataset("rgb", data=rgb_fake, compression="gzip")
+ f.create_dataset("depth", data=depth_fake, compression="gzip")
|
1ce899d118b3d46a816c0fc5f2f1a6f0ca9670ed
|
addons/resource/models/res_company.py
|
addons/resource/models/res_company.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
class ResCompany(models.Model):
_inherit = 'res.company'
resource_calendar_ids = fields.One2many(
'resource.calendar', 'company_id', 'Working Hours')
resource_calendar_id = fields.Many2one(
'resource.calendar', 'Default Working Hours', ondelete='restrict')
@api.model
def _init_data_resource_calendar(self):
for company in self.search([('resource_calendar_id', '=', False)]):
company.resource_calendar_id = self.env['resource.calendar'].create({'name': _('Standard 40 hours/week')}).id
@api.model
def create(self, values):
if not values.get('resource_calendar_id'):
values['resource_calendar_id'] = self.env['resource.calendar'].sudo().create({'name': _('Standard 40 hours/week')}).id
company = super(ResCompany, self).create(values)
# calendar created from form view: no company_id set because record was still not created
if not company.resource_calendar_id.company_id:
company.resource_calendar_id.company_id = company.id
return company
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
class ResCompany(models.Model):
_inherit = 'res.company'
resource_calendar_ids = fields.One2many(
'resource.calendar', 'company_id', 'Working Hours')
resource_calendar_id = fields.Many2one(
'resource.calendar', 'Default Working Hours', ondelete='restrict')
@api.model
def _init_data_resource_calendar(self):
self.search([('resource_calendar_id', '=', False)])._create_resource_calendar()
def _create_resource_calendar(self):
for company in self:
company.resource_calendar_id = self.env['resource.calendar'].create({
'name': _('Standard 40 hours/week'),
'company_id': company.id
}).id
@api.model
def create(self, values):
company = super(ResCompany, self).create(values)
if not company.resource_calendar_id:
company.sudo()._create_resource_calendar()
# calendar created from form view: no company_id set because record was still not created
if not company.resource_calendar_id.company_id:
company.resource_calendar_id.company_id = company.id
return company
|
Set company_id on a resource.calendar on company creation
|
[IMP] resource: Set company_id on a resource.calendar on company creation
Purpose
=======
Currently, when creating a company, the resource calendar is created if not specified.
This lead to duplicated data. In Manufacturing > Configuration > Working Time, two same working time demo data('Standard 40 Hours/Week')
Specification
=============
Company should be correctly set in the calendar.
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
---
+++
@@ -14,14 +14,20 @@
@api.model
def _init_data_resource_calendar(self):
- for company in self.search([('resource_calendar_id', '=', False)]):
- company.resource_calendar_id = self.env['resource.calendar'].create({'name': _('Standard 40 hours/week')}).id
+ self.search([('resource_calendar_id', '=', False)])._create_resource_calendar()
+
+ def _create_resource_calendar(self):
+ for company in self:
+ company.resource_calendar_id = self.env['resource.calendar'].create({
+ 'name': _('Standard 40 hours/week'),
+ 'company_id': company.id
+ }).id
@api.model
def create(self, values):
- if not values.get('resource_calendar_id'):
- values['resource_calendar_id'] = self.env['resource.calendar'].sudo().create({'name': _('Standard 40 hours/week')}).id
company = super(ResCompany, self).create(values)
+ if not company.resource_calendar_id:
+ company.sudo()._create_resource_calendar()
# calendar created from form view: no company_id set because record was still not created
if not company.resource_calendar_id.company_id:
company.resource_calendar_id.company_id = company.id
|
d3d9d5a7f23bfd95ecfb6c9609c2c0d4be503ae7
|
branches/extracting_oxford/molly/molly/providers/apps/search/application_search.py
|
branches/extracting_oxford/molly/molly/providers/apps/search/application_search.py
|
import logging
from molly.apps.search.providers import BaseSearchProvider
from molly.conf import applications
logger = logging.getLogger('molly.providers.apps.search.application_search')
class ApplicationSearchProvider(BaseSearchProvider):
def __init__(self, application_names=None):
self.application_names = application_names
self.applications = None
def perform_search(self, request, query, application=None):
if self.applications == None:
self.find_applications()
if application:
if not application in self.applications:
return []
apps = [self.applications[application]]
else:
apps = self.applications.values()
results = []
for app in apps:
try:
results += app.perform_search(request, query, application)
except Exception, e:
logger.exception("Application search provider raised exception: %r", e)
pass
print apps
return results
def find_applications(self):
self.applications = {}
for app_name in applications:
application = applications[app_name]
if self.application_names and not application in self.application_names:
continue
try:
search_module_name = '%s.search' % application.name
_temp = __import__(search_module_name,
globals(), locals(),
['SearchProvider'], -1)
if not hasattr(_temp, 'SearchProvider'):
raise ImportError
except ImportError:
continue
else:
search_provider = _temp.SearchProvider
self.applications[application.slug] = search_provider
|
import logging
from molly.apps.search.providers import BaseSearchProvider
from molly.conf import applications
logger = logging.getLogger('molly.providers.apps.search.application_search')
class ApplicationSearchProvider(BaseSearchProvider):
def __init__(self, local_names=None):
self.local_names = local_names
self.applications = None
def perform_search(self, request, query, application=None):
if self.applications == None:
self.find_applications()
if application:
if not application in self.applications:
return []
apps = [self.applications[application]]
else:
apps = self.applications.values()
results = []
for app in apps:
try:
results += app.perform_search(request, query, application)
except Exception, e:
logger.exception("Application search provider raised exception: %r", e)
pass
print apps
return results
def find_applications(self):
self.applications = {}
for local_name in applications:
application = applications[local_name]
if self.local_names and not application in self.local_names:
continue
try:
search_module_name = '%s.search' % application.application_name
_temp = __import__(search_module_name,
globals(), locals(),
['SearchProvider'], -1)
if not hasattr(_temp, 'SearchProvider'):
raise ImportError
except ImportError:
continue
else:
search_provider = _temp.SearchProvider
self.applications[application.local_name] = search_provider
|
Update ApplicationSearchProvider to reflect conf.settings.Application changes.
|
Update ApplicationSearchProvider to reflect conf.settings.Application changes.
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
---
+++
@@ -7,8 +7,8 @@
logger = logging.getLogger('molly.providers.apps.search.application_search')
class ApplicationSearchProvider(BaseSearchProvider):
- def __init__(self, application_names=None):
- self.application_names = application_names
+ def __init__(self, local_names=None):
+ self.local_names = local_names
self.applications = None
def perform_search(self, request, query, application=None):
@@ -35,13 +35,13 @@
def find_applications(self):
self.applications = {}
- for app_name in applications:
- application = applications[app_name]
+ for local_name in applications:
+ application = applications[local_name]
- if self.application_names and not application in self.application_names:
+ if self.local_names and not application in self.local_names:
continue
try:
- search_module_name = '%s.search' % application.name
+ search_module_name = '%s.search' % application.application_name
_temp = __import__(search_module_name,
globals(), locals(),
['SearchProvider'], -1)
@@ -52,6 +52,6 @@
else:
search_provider = _temp.SearchProvider
- self.applications[application.slug] = search_provider
+ self.applications[application.local_name] = search_provider
|
8b6eee19707a981fddfa2114be3d90353e049b33
|
examples/undocumented/python/converter_stochasticproximityembedding.py
|
examples/undocumented/python/converter_stochasticproximityembedding.py
|
#!/usr/bin/env python
data = '../data/fm_train_real.dat'
parameter_list = [[data, 20]]
def converter_stochasticproximityembedding (data_fname, k):
try:
from shogun import RealFeatures,StochasticProximityEmbedding, SPE_GLOBAL, SPE_LOCAL, CSVFile
features = RealFeatures(CSVFile(data_fname))
converter = StochasticProximityEmbedding()
converter.set_target_dim(1)
converter.set_nupdates(40)
# Embed with local strategy
converter.set_k(k)
converter.set_strategy(SPE_LOCAL)
converter.embed(features)
# Embed with global strategy
converter.set_strategy(SPE_GLOBAL)
converter.embed(features)
return features
except ImportError:
print('No Eigen3 available')
if __name__=='__main__':
print('StochasticProximityEmbedding')
converter_stochasticproximityembedding(*parameter_list[0])
|
#!/usr/bin/env python
data = '../data/fm_train_real.dat'
parameter_list = [[data, 20]]
def converter_stochasticproximityembedding (data_fname, k):
try:
from shogun import RealFeatures,StochasticProximityEmbedding, SPE_GLOBAL, SPE_LOCAL, CSVFile
features = RealFeatures(CSVFile(data_fname))
converter = StochasticProximityEmbedding()
converter.set_target_dim(1)
converter.set_nupdates(40)
# Embed with local strategy
converter.set_k(k)
converter.set_strategy(SPE_LOCAL)
features = converter.transform(features)
# Embed with global strategy
converter.set_strategy(SPE_GLOBAL)
features = converter.transform(features)
return features
except ImportError:
print('No Eigen3 available')
if __name__=='__main__':
print('StochasticProximityEmbedding')
converter_stochasticproximityembedding(*parameter_list[0])
|
Use transform instead of embed
|
Use transform instead of embed
|
Python
|
bsd-3-clause
|
karlnapf/shogun,lisitsyn/shogun,karlnapf/shogun,geektoni/shogun,geektoni/shogun,besser82/shogun,sorig/shogun,sorig/shogun,lisitsyn/shogun,besser82/shogun,lambday/shogun,geektoni/shogun,sorig/shogun,lambday/shogun,lambday/shogun,lambday/shogun,besser82/shogun,geektoni/shogun,lisitsyn/shogun,lisitsyn/shogun,geektoni/shogun,lisitsyn/shogun,sorig/shogun,lambday/shogun,karlnapf/shogun,shogun-toolbox/shogun,shogun-toolbox/shogun,besser82/shogun,besser82/shogun,sorig/shogun,karlnapf/shogun,lambday/shogun,shogun-toolbox/shogun,lisitsyn/shogun,karlnapf/shogun,karlnapf/shogun,besser82/shogun,sorig/shogun,shogun-toolbox/shogun,geektoni/shogun,shogun-toolbox/shogun,shogun-toolbox/shogun
|
---
+++
@@ -14,10 +14,10 @@
# Embed with local strategy
converter.set_k(k)
converter.set_strategy(SPE_LOCAL)
- converter.embed(features)
+ features = converter.transform(features)
# Embed with global strategy
converter.set_strategy(SPE_GLOBAL)
- converter.embed(features)
+ features = converter.transform(features)
return features
except ImportError:
|
b7d5767df23aa9b0dcc5c32d83387a622bf9e9e0
|
webapp/thing.py
|
webapp/thing.py
|
#!/usr/bin/env python
import RPi.GPIO as GPIO
LED_PIN = 23
SWITCH_PIN = 24
# new style class
class PiThing(object):
"""Raspberry Pi Internet 'Thing'."""
def __init__(self):
# use BCM numbering scheme when using Adafruit pi cobbler. Don't use board scheme.
GPIO.setmode(GPIO.BCM)
# led as output
GPIO.setup(LED_PIN, GPIO.OUT)
# switch as input
GPIO.setup(SWITCH_PIN, GPIO.IN)
def read_switch(self):
"""returns true if switch is high, false if switch is low
"""
return gpio.input(SWITCH_PIN)
def set_led(self, value):
"""Set the LED to the passed in value, True for on, False for off.
"""
GPIO.output(LED_PIN, value)
|
#!/usr/bin/env python
import RPi.GPIO as GPIO
LED_PIN = 23
SWITCH_PIN = 24
# new style class
class PiThing(object):
"""Raspberry Pi Internet 'Thing'."""
def __init__(self):
# use BCM numbering scheme when using Adafruit pi cobbler. Don't use board scheme.
GPIO.setmode(GPIO.BCM)
# led as output
GPIO.setup(LED_PIN, GPIO.OUT)
# switch as input
GPIO.setup(SWITCH_PIN, GPIO.IN)
def read_switch(self):
"""returns true if switch is high, false if switch is low
"""
return GPIO.input(SWITCH_PIN)
def set_led(self, value):
"""Set the LED to the passed in value, True for on, False for off.
"""
GPIO.output(LED_PIN, value)
|
Fix gpio to uppercase GPIO
|
Fix gpio to uppercase GPIO
|
Python
|
mit
|
beepscore/pi_thing,beepscore/pi_thing,beepscore/pi_thing
|
---
+++
@@ -22,7 +22,7 @@
def read_switch(self):
"""returns true if switch is high, false if switch is low
"""
- return gpio.input(SWITCH_PIN)
+ return GPIO.input(SWITCH_PIN)
def set_led(self, value):
|
49749403321d16f14ecf0f6f95d5511e5429d7a2
|
actstream/__init__.py
|
actstream/__init__.py
|
try:
from actstream.signals import action
except:
pass
import django
__version__ = '1.4.0'
__author__ = 'Asif Saif Uddin, Justin Quick <justquick@gmail.com>'
if django.VERSION < (3, 2):
default_app_config = 'actstream.apps.ActstreamConfig'
|
try:
from actstream.signals import action
except:
pass
import django
__version__ = '1.4.0'
__author__ = 'Asif Saif Uddin, Justin Quick <justquick@gmail.com>'
if django.VERSION >= (3, 2):
# The declaration is only needed for older Django versions
pass
else:
default_app_config = 'actstream.apps.ActstreamConfig'
|
Fix django app config default
|
Fix django app config default
|
Python
|
bsd-3-clause
|
justquick/django-activity-stream,pombredanne/django-activity-stream,pombredanne/django-activity-stream,justquick/django-activity-stream
|
---
+++
@@ -8,5 +8,8 @@
__version__ = '1.4.0'
__author__ = 'Asif Saif Uddin, Justin Quick <justquick@gmail.com>'
-if django.VERSION < (3, 2):
+if django.VERSION >= (3, 2):
+ # The declaration is only needed for older Django versions
+ pass
+else:
default_app_config = 'actstream.apps.ActstreamConfig'
|
edc773bfd5d25a42fa2759631500fc4861557e57
|
fireplace/cards/tgt/priest.py
|
fireplace/cards/tgt/priest.py
|
from ..utils import *
##
# Minions
# Holy Champion
class AT_011:
events = Heal().on(Buff(SELF, "AT_011e"))
# Spawn of Shadows
class AT_012:
inspire = Hit(ALL_HEROES, 4)
##
# Spells
# Power Word: Glory
class AT_013:
play = Buff(TARGET, "AT_013e")
class AT_013e:
events = Attack(OWNER).on(Heal(FRIENDLY_HERO, 4))
# Convert
class AT_015:
play = Give(CONTROLLER, Copy(TARGET))
# Confuse
class AT_016:
play = Buff(ALL_MINIONS, "AT_016e")
# Flash Heal
class AT_055:
play = Heal(TARGET, 5)
|
from ..utils import *
##
# Minions
# Holy Champion
class AT_011:
events = Heal().on(Buff(SELF, "AT_011e"))
# Spawn of Shadows
class AT_012:
inspire = Hit(ALL_HEROES, 4)
# Shadowfiend
class AT_014:
events = Draw(CONTROLLER).on(Buff(Draw.Args.CARD, "AT_014e"))
# Wyrmrest Agent
class AT_116:
play = HOLDING_DRAGON & Buff(SELF, "AT_116e")
##
# Spells
# Power Word: Glory
class AT_013:
play = Buff(TARGET, "AT_013e")
class AT_013e:
events = Attack(OWNER).on(Heal(FRIENDLY_HERO, 4))
# Convert
class AT_015:
play = Give(CONTROLLER, Copy(TARGET))
# Confuse
class AT_016:
play = Buff(ALL_MINIONS, "AT_016e")
# Flash Heal
class AT_055:
play = Heal(TARGET, 5)
|
Implement more TGT Priest cards
|
Implement more TGT Priest cards
|
Python
|
agpl-3.0
|
oftc-ftw/fireplace,beheh/fireplace,smallnamespace/fireplace,Meerkov/fireplace,Ragowit/fireplace,smallnamespace/fireplace,oftc-ftw/fireplace,amw2104/fireplace,liujimj/fireplace,NightKev/fireplace,jleclanche/fireplace,Ragowit/fireplace,amw2104/fireplace,liujimj/fireplace,Meerkov/fireplace
|
---
+++
@@ -12,6 +12,16 @@
# Spawn of Shadows
class AT_012:
inspire = Hit(ALL_HEROES, 4)
+
+
+# Shadowfiend
+class AT_014:
+ events = Draw(CONTROLLER).on(Buff(Draw.Args.CARD, "AT_014e"))
+
+
+# Wyrmrest Agent
+class AT_116:
+ play = HOLDING_DRAGON & Buff(SELF, "AT_116e")
##
|
10dea74d7f7946e9bab8c99b489793708845183c
|
fireplace/cards/wog/hunter.py
|
fireplace/cards/wog/hunter.py
|
from ..utils import *
##
# Minions
class OG_179:
"Fiery Bat"
deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1)
class OG_292:
"Forlorn Stalker"
play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e")
OG_292e = buff(+1, +1)
|
from ..utils import *
##
# Minions
class OG_179:
"Fiery Bat"
deathrattle = Hit(RANDOM_ENEMY_CHARACTER, 1)
class OG_292:
"Forlorn Stalker"
play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e")
OG_292e = buff(+1, +1)
##
# Spells
class OG_045:
"Infest"
play = Buff(FRIENDLY_MINIONS, "OG_045a")
class OG_045a:
"Nerubian Spores"
deathrattle = Give(CONTROLLER, RandomBeast())
tags = {GameTag.DEATHRATTLE: True}
class OG_061:
"On the Hunt"
play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t")
class OG_211:
"Call of the Wild"
play = (
Summon(CONTROLLER, "NEW1_034"),
Summon(CONTROLLER, "NEW1_033"),
Summon(CONTROLLER, "NEW1_032")
)
|
Implement Infest, On the Hunt, Call of the Wild
|
Implement Infest, On the Hunt, Call of the Wild
|
Python
|
agpl-3.0
|
jleclanche/fireplace,beheh/fireplace,NightKev/fireplace
|
---
+++
@@ -14,3 +14,30 @@
play = Buff(FRIENDLY_HAND + MINION + DEATHRATTLE, "OG_292e")
OG_292e = buff(+1, +1)
+
+
+##
+# Spells
+
+class OG_045:
+ "Infest"
+ play = Buff(FRIENDLY_MINIONS, "OG_045a")
+
+class OG_045a:
+ "Nerubian Spores"
+ deathrattle = Give(CONTROLLER, RandomBeast())
+ tags = {GameTag.DEATHRATTLE: True}
+
+
+class OG_061:
+ "On the Hunt"
+ play = Hit(TARGET, 1), Summon(CONTROLLER, "OG_061t")
+
+
+class OG_211:
+ "Call of the Wild"
+ play = (
+ Summon(CONTROLLER, "NEW1_034"),
+ Summon(CONTROLLER, "NEW1_033"),
+ Summon(CONTROLLER, "NEW1_032")
+ )
|
df52bf506fdb6754d51c2320108bcd832a0dfc02
|
django_twilio_sms/admin.py
|
django_twilio_sms/admin.py
|
from django.contrib import admin
from .models import *
class MessageAdmin(admin.ModelAdmin):
list_display = ('to_phone_number', 'from_phone_number', 'status', 'date_sent')
list_display_links = list_display
list_filter = ('status', 'date_sent')
date_hierarchy = 'date_sent'
ordering = ('-date_sent', )
class ResponseAdmin(admin.ModelAdmin):
list_display = ('action', 'active', 'body', 'date_updated')
list_display_links = list_display
list_filter = ('action', 'active')
class AccountAdmin(admin.ModelAdmin):
list_display = ('friendly_name', 'owner_account_sid', 'account_type', 'status', 'date_updated')
list_display_links = list_display
admin.site.register(Message, MessageAdmin)
admin.site.register(Response, ResponseAdmin)
admin.site.register(Account, AccountAdmin)
|
from django.contrib import admin
from .models import *
class MessageAdmin(admin.ModelAdmin):
list_display = ('to_phone_number', 'from_phone_number', 'status', 'direction', 'date_sent')
list_display_links = list_display
list_filter = ('status', 'direction', 'date_sent')
date_hierarchy = 'date_sent'
ordering = ('-date_sent', )
class ResponseAdmin(admin.ModelAdmin):
list_display = ('action', 'active', 'body', 'date_updated')
list_display_links = list_display
list_filter = ('action', 'active')
class AccountAdmin(admin.ModelAdmin):
list_display = ('friendly_name', 'owner_account_sid', 'account_type', 'status', 'date_updated')
list_display_links = list_display
admin.site.register(Message, MessageAdmin)
admin.site.register(Response, ResponseAdmin)
admin.site.register(Account, AccountAdmin)
|
Add direction to Message listing
|
Add direction to Message listing
|
Python
|
bsd-3-clause
|
cfc603/django-twilio-sms-models
|
---
+++
@@ -3,9 +3,9 @@
class MessageAdmin(admin.ModelAdmin):
- list_display = ('to_phone_number', 'from_phone_number', 'status', 'date_sent')
+ list_display = ('to_phone_number', 'from_phone_number', 'status', 'direction', 'date_sent')
list_display_links = list_display
- list_filter = ('status', 'date_sent')
+ list_filter = ('status', 'direction', 'date_sent')
date_hierarchy = 'date_sent'
ordering = ('-date_sent', )
|
9ef1c87ec752df3fb32cfe8ee94216e5eb3326fe
|
alg_quick_sort.py
|
alg_quick_sort.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def quick_sort(a_list):
"""Quick sort algortihm with list comprehension recursion."""
if len(a_list) <= 1:
return a_list
pivot_value = a_list[len(a_list) // 2]
left_list = [x for x in a_list if x < pivot_value]
middle_list = [x for x in a_list if x == pivot_value]
right_list = [x for x in a_list if x > pivot_value]
return quick_sort(left_list) + middle_list + quick_sort(right_list)
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('Quick sort with list comprehension: ')
print(quick_sort(a_list))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def quick_sort(a_list):
"""Quick sort algortihm with list comprehension recursion.
Time complexity: O(n*logn).
"""
if len(a_list) <= 1:
return a_list
pivot_value = a_list[len(a_list) // 2]
left_list = [x for x in a_list if x < pivot_value]
middle_list = [x for x in a_list if x == pivot_value]
right_list = [x for x in a_list if x > pivot_value]
return quick_sort(left_list) + middle_list + quick_sort(right_list)
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('Quick sort with list comprehension: ')
print(quick_sort(a_list))
if __name__ == '__main__':
main()
|
Add to doc string: time complexity
|
Add to doc string: time complexity
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
---
+++
@@ -4,7 +4,10 @@
def quick_sort(a_list):
- """Quick sort algortihm with list comprehension recursion."""
+ """Quick sort algortihm with list comprehension recursion.
+
+ Time complexity: O(n*logn).
+ """
if len(a_list) <= 1:
return a_list
pivot_value = a_list[len(a_list) // 2]
|
a71b60363a39414eac712210086ce51abeed41d0
|
api/feedback/admin.py
|
api/feedback/admin.py
|
from django import forms
from django.contrib import admin
from feedback.models import Feedback
class FeedbackAdminForm(forms.ModelForm):
class Meta:
model = Feedback
fields = '__all__'
widgets = {
'comments': forms.Textarea(attrs={'cols': 80, 'rows': 5}),
'user_agent': forms.Textarea(attrs={'cols': 80, 'rows': 5}),
'redux_state': forms.Textarea(attrs={'cols': 80, 'rows': 20}),
}
class FeedbackAdmin(admin.ModelAdmin):
model = Feedback
form = FeedbackAdminForm
list_display = (
'player',
'get_type',
'created',
'comments',
)
list_filter = (
'type',
'created',
)
search_fields = (
'comments',
'user_agent',
'redux_state',
'player__username',
)
def get_type(self, obj):
return obj.get_type_display()
get_type.short_description = 'Type'
get_type.admin_order_field = 'type'
admin.site.register(Feedback, FeedbackAdmin)
|
from django import forms
from django.contrib import admin
from feedback.models import Feedback
class FeedbackAdminForm(forms.ModelForm):
class Meta:
model = Feedback
fields = '__all__'
widgets = {
'comments': forms.Textarea(attrs={'cols': 80, 'rows': 5}),
'user_agent': forms.Textarea(attrs={'cols': 80, 'rows': 5}),
'redux_state': forms.Textarea(attrs={'cols': 80, 'rows': 20}),
}
class FeedbackAdmin(admin.ModelAdmin):
model = Feedback
form = FeedbackAdminForm
list_display = (
'player',
'get_type',
'created',
'comments',
)
list_filter = (
'type',
'created',
)
search_fields = (
'comments',
'user_agent',
'redux_state',
'player__username',
)
ordering = (
'-created',
)
def get_type(self, obj):
return obj.get_type_display()
get_type.short_description = 'Type'
get_type.admin_order_field = 'type'
admin.site.register(Feedback, FeedbackAdmin)
|
Order feedback by most recent
|
Order feedback by most recent
|
Python
|
apache-2.0
|
prattl/teamfinder,prattl/teamfinder,prattl/teamfinder,prattl/teamfinder
|
---
+++
@@ -34,6 +34,9 @@
'redux_state',
'player__username',
)
+ ordering = (
+ '-created',
+ )
def get_type(self, obj):
return obj.get_type_display()
|
90a024186928d98dfbd3db29d02d5eeba4a55415
|
sql/src/test/BugTracker-2009/Tests/create_on_ro_db_crash.SF-2830238.py
|
sql/src/test/BugTracker-2009/Tests/create_on_ro_db_crash.SF-2830238.py
|
import sys
import os
import time
try:
import subprocess
except ImportError:
# use private copy for old Python versions
import MonetDBtesting.subprocess26 as subprocess
def server():
s = subprocess.Popen("%s --dbinit='include sql;' --set gdk_readonly=yes" % os.getenv('MSERVER'),
shell = True,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
s.stdin.write('\nio.printf("\\nReady.\\n");\n')
s.stdin.flush()
while True:
ln = s.stdout.readline()
if not ln:
print 'Unexpected EOF from server'
sys.exit(1)
sys.stdout.write(ln)
if 'Ready' in ln:
break
return s
def client():
c = subprocess.Popen("%s" % os.getenv('SQL_CLIENT'),
shell = True,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
return c
script1 = '''\
create table t2 (a int);
'''
def main():
s = server()
c = client()
o, e = c.communicate(script1)
sys.stdout.write(o)
sys.stderr.write(e)
o, e = s.communicate()
sys.stdout.write(o)
sys.stderr.write(e)
if __name__ == '__main__':
main()
|
import sys
import os
import time
try:
import subprocess
except ImportError:
# use private copy for old Python versions
import MonetDBtesting.subprocess26 as subprocess
def server():
s = subprocess.Popen('%s "--dbinit=include sql;" --set gdk_readonly=yes' % os.getenv('MSERVER'),
shell = True,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
s.stdin.write('\nio.printf("\\nReady.\\n");\n')
s.stdin.flush()
while True:
ln = s.stdout.readline()
if not ln:
print 'Unexpected EOF from server'
sys.exit(1)
sys.stdout.write(ln)
if 'Ready' in ln:
break
return s
def client():
c = subprocess.Popen("%s" % os.getenv('SQL_CLIENT'),
shell = True,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
return c
script1 = '''\
create table t2 (a int);
'''
def main():
s = server()
c = client()
o, e = c.communicate(script1)
sys.stdout.write(o)
sys.stderr.write(e)
o, e = s.communicate()
sys.stdout.write(o)
sys.stderr.write(e)
if __name__ == '__main__':
main()
|
Use double quotes to quote command arguments. The Windows command parser doesn't recognize single quotes.
|
Use double quotes to quote command arguments. The Windows command
parser doesn't recognize single quotes.
|
Python
|
mpl-2.0
|
zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb
|
---
+++
@@ -8,7 +8,7 @@
import MonetDBtesting.subprocess26 as subprocess
def server():
- s = subprocess.Popen("%s --dbinit='include sql;' --set gdk_readonly=yes" % os.getenv('MSERVER'),
+ s = subprocess.Popen('%s "--dbinit=include sql;" --set gdk_readonly=yes' % os.getenv('MSERVER'),
shell = True,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
|
13f857978378df38a4d52cc92aec750c3804f069
|
build.py
|
build.py
|
# OSU SPS Website Build Script
def loadFile( src, prefx ):
out = ""
for line in open( src, 'r' ):
out += prefx + line
return out
def outputFile( name, content ):
file = open( name, 'w' )
file.write( content )
file.close()
out_folder = "output/"
src_folder = "src/"
includes_folder = src_folder + "includes/"
header = loadFile( includes_folder + "header.html", "" )
header = header.replace( "[STYLE]", loadFile( includes_folder + "style.css", "\t" ) )
header = header.replace( "[SCRIPT]", loadFile( includes_folder + "script.js", "\t" ) )
footer = loadFile( includes_folder + "footer.html", "" )
names = [
"index.html"
]
for name in names:
raw = loadFile( src_folder + name, "" )
raw = header + raw + footer
outputFile( out_folder + name, raw )
|
# OSU SPS Website Build Script
import os
def loadFile( src, prefx ):
out = ""
for line in open( src, 'r' ):
out += prefx + line
return out
def outputFile( name, content ):
file = open( name, 'w' )
file.write( content )
file.close()
out_folder = "output/"
src_folder = "src/"
includes_folder = src_folder + "includes/"
header = loadFile( includes_folder + "header.html", "" )
header = header.replace( "[STYLE]", loadFile( includes_folder + "style.css", "\t" ) )
header = header.replace( "[SCRIPT]", loadFile( includes_folder + "script.js", "\t" ) )
footer = loadFile( includes_folder + "footer.html", "" )
names = set()
for name in os.listdir( src_folder ):
names.add( name )
names.remove( includes_folder[ len( src_folder ) : -1 ] )
for name in names:
raw = loadFile( src_folder + name, "" )
raw = header + raw + footer
outputFile( out_folder + name, raw )
|
Remove need for static pages
|
Remove need for static pages
|
Python
|
mit
|
GeekLogan/OSU-SPS-Website,GeekLogan/OSU-SPS-Website
|
---
+++
@@ -1,4 +1,6 @@
# OSU SPS Website Build Script
+
+import os
def loadFile( src, prefx ):
out = ""
@@ -21,9 +23,11 @@
footer = loadFile( includes_folder + "footer.html", "" )
-names = [
- "index.html"
-]
+names = set()
+for name in os.listdir( src_folder ):
+ names.add( name )
+
+names.remove( includes_folder[ len( src_folder ) : -1 ] )
for name in names:
raw = loadFile( src_folder + name, "" )
|
f34c230c93ea8c87a77c29dc1bf86aa450e87749
|
build.py
|
build.py
|
#!/usr/bin/env python3
import glob
import time
import re
def readfile(fn):
with open(fn, 'Ur', encoding='utf8') as f:
return f.read()
def loaderString(var):
fn = var.group(1)
return readfile(fn).replace('\n', '\\n').replace('\'', '\\\'')
def loaderRaw(var):
fn = var.group(1)
return readfile(fn)
c = '\n\n'.join(map(readfile, glob.glob('code/*')))
n = time.strftime('%Y-%m-%d-%H%M%S')
m = readfile('main.js')
m = m.split('@@INJECTHERE@@')
m.insert(1, c)
m = '\n\n'.join(m)
m = m.replace('@@BUILDDATE@@', n)
m = re.sub('@@INCLUDERAW:([0-9a-zA-Z_./-]+)@@', loaderRaw, m)
m = re.sub('@@INCLUDESTRING:([0-9a-zA-Z_./-]+)@@', loaderString, m)
with open('iitc-debug.user.js', 'w', encoding='utf8') as f:
f.write(m)
# vim: ai si ts=4 sw=4 sts=4 et
|
#!/usr/bin/env python
import glob
import time
import re
import io
def readfile(fn):
with io.open(fn, 'Ur', encoding='utf8') as f:
return f.read()
def loaderString(var):
fn = var.group(1)
return readfile(fn).replace('\n', '\\n').replace('\'', '\\\'')
def loaderRaw(var):
fn = var.group(1)
return readfile(fn)
c = '\n\n'.join(map(readfile, glob.glob('code/*')))
n = time.strftime('%Y-%m-%d-%H%M%S')
m = readfile('main.js')
m = m.split('@@INJECTHERE@@')
m.insert(1, c)
m = '\n\n'.join(m)
m = m.replace('@@BUILDDATE@@', n)
m = re.sub('@@INCLUDERAW:([0-9a-zA-Z_./-]+)@@', loaderRaw, m)
m = re.sub('@@INCLUDESTRING:([0-9a-zA-Z_./-]+)@@', loaderString, m)
with io.open('iitc-debug.user.js', 'w', encoding='utf8') as f:
f.write(m)
# vim: ai si ts=4 sw=4 sts=4 et
|
Use io.open to support python 2.x _and_ python 3
|
Use io.open to support python 2.x _and_ python 3
|
Python
|
isc
|
SpamapS/ingress-intel-total-conversion,fkloft/ingress-intel-total-conversion,Galfinite/ingress-intel-total-conversion,jonatkins/ingress-intel-total-conversion,meyerdg/ingress-intel-total-conversion,pleasantone/ingress-intel-total-conversion,MarkTraceur/ingress-intel-total-conversion,nikolawannabe/ingress-intel-total-conversion,kyotoalgo/ingress-intel-total-conversion,kyotoalgo/ingress-intel-total-conversion,kdawes/ingress-intel-total-conversion,nikolawannabe/ingress-intel-total-conversion,iitc-project/ingress-intel-total-conversion,Galfinite/ingress-intel-total-conversion,nhamer/ingress-intel-total-conversion,imsaguy/ingress-intel-total-conversion,meyerdg/ingress-intel-total-conversion,jonatkins/ingress-intel-total-conversion,ruharen/ingress-intel-total-conversion,SpamapS/ingress-intel-total-conversion,michmerr/ingress-intel-total-conversion,Hubertzhang/ingress-intel-total-conversion,Galfinite/ingress-intel-total-conversion,Hubertzhang/ingress-intel-total-conversion,adostes/ingress-intel-total-conversion,adostes/ingress-intel-total-conversion,MarkTraceur/ingress-intel-total-conversion,iitc-project/ingress-intel-total-conversion,SteelToad/ingress-intel-total-conversion,kottt/ingress-intel-total-conversion,jarridgraham/ingress-intel-total-conversion,manierim/ingress-intel-total-conversion,McBen/ingress-intel-total-conversion,rspielmann/ingress-intel-total-conversion,pleasantone/ingress-intel-total-conversion,SpamapS/ingress-intel-total-conversion,MNoelJones/ingress-intel-total-conversion,jarridgraham/ingress-intel-total-conversion,3ch01c/ingress-intel-total-conversion,dwandw/ingress-intel-total-conversion,ruharen/ingress-intel-total-conversion,kdawes/ingress-intel-total-conversion,adostes/ingress-intel-total-conversion,rspielmann/ingress-intel-total-conversion,michaeldever/ingress-intel-total-conversion,ruharen/ingress-intel-total-conversion,kdawes/ingress-intel-total-conversion,fkloft/ingress-intel-total-conversion,MarkTraceur/ingress-intel-total-conversion,kyotoalgo
/ingress-intel-total-conversion,mutongx/ingress-intel-total-conversion,nhamer/ingress-intel-total-conversion,jarridgraham/ingress-intel-total-conversion,adostes/ingress-intel-total-conversion,FLamparski/ingress-intel-total-conversion,SpamapS/ingress-intel-total-conversion,tony2001/ingress-intel-total-conversion,jonatkins/ingress-intel-total-conversion,Yossi/ingress-intel-total-conversion,mutongx/ingress-intel-total-conversion,MNoelJones/ingress-intel-total-conversion,mutongx/ingress-intel-total-conversion,nexushoratio/ingress-intel-total-conversion,mrmakeit/ingress-intel-total-conversion,Yossi/ingress-intel-total-conversion,nexushoratio/ingress-intel-total-conversion,imsaguy/ingress-intel-total-conversion,3ch01c/ingress-intel-total-conversion,sgtwilko/ingress-intel-total-conversion,michaeldever/ingress-intel-total-conversion,pfsmorigo/ingress-intel-total-conversion,michaeldever/ingress-intel-total-conversion,sgtwilko/ingress-intel-total-conversion,dwimberger/ingress-intel-total-conversion,iitc-project/ingress-intel-total-conversion,dwimberger/ingress-intel-total-conversion,FLamparski/ingress-intel-total-conversion,michmerr/ingress-intel-total-conversion,FLamparski/ingress-intel-total-conversion,FLamparski/ingress-intel-total-conversion,Hubertzhang/ingress-intel-total-conversion,sndirsch/ingress-intel-total-conversion,nhamer/ingress-intel-total-conversion,3ch01c/ingress-intel-total-conversion,pleasantone/ingress-intel-total-conversion,ZasoGD/ingress-intel-total-conversion,ZasoGD/ingress-intel-total-conversion,sgtwilko/ingress-intel-total-conversion,pfsmorigo/ingress-intel-total-conversion,dwandw/ingress-intel-total-conversion,Yossi/ingress-intel-total-conversion,kyotoalgo/ingress-intel-total-conversion,McBen/ingress-intel-total-conversion,insane210/ingress-intel-total-conversion,fkloft/ingress-intel-total-conversion,jarridgraham/ingress-intel-total-conversion,dwimberger/ingress-intel-total-conversion,adostes/ingress-intel-total-conversion,nexushoratio/ingress-intel-t
otal-conversion,Galfinite/ingress-intel-total-conversion,nikolawannabe/ingress-intel-total-conversion,mrmakeit/ingress-intel-total-conversion,pfsmorigo/ingress-intel-total-conversion,michaeldever/ingress-intel-total-conversion,ruharen/ingress-intel-total-conversion,manierim/ingress-intel-total-conversion,rspielmann/ingress-intel-total-conversion,sndirsch/ingress-intel-total-conversion,manierim/ingress-intel-total-conversion,Hubertzhang/ingress-intel-total-conversion,dwandw/ingress-intel-total-conversion,3ch01c/ingress-intel-total-conversion,kyotoalgo/ingress-intel-total-conversion,Yossi/ingress-intel-total-conversion,nexushoratio/ingress-intel-total-conversion,insane210/ingress-intel-total-conversion,tony2001/ingress-intel-total-conversion,hayeswise/ingress-intel-total-conversion,hayeswise/ingress-intel-total-conversion,adostes/ingress-intel-total-conversion,hayeswise/ingress-intel-total-conversion,tony2001/ingress-intel-total-conversion,sndirsch/ingress-intel-total-conversion,fkloft/ingress-intel-total-conversion,MNoelJones/ingress-intel-total-conversion,MNoelJones/ingress-intel-total-conversion,kdawes/ingress-intel-total-conversion,dwimberger/ingress-intel-total-conversion,pfsmorigo/ingress-intel-total-conversion,michaeldever/ingress-intel-total-conversion,nhamer/ingress-intel-total-conversion,dwimberger/ingress-intel-total-conversion,3ch01c/ingress-intel-total-conversion,kottt/ingress-intel-total-conversion,Hubertzhang/ingress-intel-total-conversion,dwimberger/ingress-intel-total-conversion,MNoelJones/ingress-intel-total-conversion,Hubertzhang/ingress-intel-total-conversion,pfsmorigo/ingress-intel-total-conversion,imsaguy/ingress-intel-total-conversion,nikolawannabe/ingress-intel-total-conversion,kottt/ingress-intel-total-conversion,nikolawannabe/ingress-intel-total-conversion,McBen/ingress-intel-total-conversion,sgtwilko/ingress-intel-total-conversion,mrmakeit/ingress-intel-total-conversion,imsaguy/ingress-intel-total-conversion,jonatkins/ingress-intel-total-con
version,mrmakeit/ingress-intel-total-conversion,MarkTraceur/ingress-intel-total-conversion,pleasantone/ingress-intel-total-conversion,kdawes/ingress-intel-total-conversion,SteelToad/ingress-intel-total-conversion,sndirsch/ingress-intel-total-conversion,McBen/ingress-intel-total-conversion,pfsmorigo/ingress-intel-total-conversion,insane210/ingress-intel-total-conversion,SteelToad/ingress-intel-total-conversion,Galfinite/ingress-intel-total-conversion,MarkTraceur/ingress-intel-total-conversion,hayeswise/ingress-intel-total-conversion,ruharen/ingress-intel-total-conversion,hayeswise/ingress-intel-total-conversion,sgtwilko/ingress-intel-total-conversion,nexushoratio/ingress-intel-total-conversion,manierim/ingress-intel-total-conversion,nhamer/ingress-intel-total-conversion,dwimberger/ingress-intel-total-conversion,jarridgraham/ingress-intel-total-conversion,michaeldever/ingress-intel-total-conversion,nexushoratio/ingress-intel-total-conversion,imsaguy/ingress-intel-total-conversion,ZasoGD/ingress-intel-total-conversion,Yossi/ingress-intel-total-conversion,FLamparski/ingress-intel-total-conversion,michmerr/ingress-intel-total-conversion,rspielmann/ingress-intel-total-conversion,dwandw/ingress-intel-total-conversion,tony2001/ingress-intel-total-conversion,mutongx/ingress-intel-total-conversion,SpamapS/ingress-intel-total-conversion,michmerr/ingress-intel-total-conversion,Yossi/ingress-intel-total-conversion,SteelToad/ingress-intel-total-conversion,insane210/ingress-intel-total-conversion,dwandw/ingress-intel-total-conversion,rspielmann/ingress-intel-total-conversion,dwandw/ingress-intel-total-conversion,tony2001/ingress-intel-total-conversion,Galfinite/ingress-intel-total-conversion,FLamparski/ingress-intel-total-conversion,kyotoalgo/ingress-intel-total-conversion,iitc-project/ingress-intel-total-conversion,Hubertzhang/ingress-intel-total-conversion,mutongx/ingress-intel-total-conversion,SteelToad/ingress-intel-total-conversion,meyerdg/ingress-intel-total-conversion,McBen
/ingress-intel-total-conversion,jarridgraham/ingress-intel-total-conversion,jonatkins/ingress-intel-total-conversion,tony2001/ingress-intel-total-conversion,SpamapS/ingress-intel-total-conversion,Hubertzhang/ingress-intel-total-conversion,iitc-project/ingress-intel-total-conversion,mutongx/ingress-intel-total-conversion,kottt/ingress-intel-total-conversion,pleasantone/ingress-intel-total-conversion,fkloft/ingress-intel-total-conversion,manierim/ingress-intel-total-conversion,ruharen/ingress-intel-total-conversion,MarkTraceur/ingress-intel-total-conversion,manierim/ingress-intel-total-conversion,fkloft/ingress-intel-total-conversion,meyerdg/ingress-intel-total-conversion,imsaguy/ingress-intel-total-conversion,ZasoGD/ingress-intel-total-conversion,ZasoGD/ingress-intel-total-conversion,jonatkins/ingress-intel-total-conversion,kottt/ingress-intel-total-conversion,kdawes/ingress-intel-total-conversion,meyerdg/ingress-intel-total-conversion,rspielmann/ingress-intel-total-conversion,hayeswise/ingress-intel-total-conversion,sgtwilko/ingress-intel-total-conversion,sndirsch/ingress-intel-total-conversion,nhamer/ingress-intel-total-conversion,meyerdg/ingress-intel-total-conversion,SteelToad/ingress-intel-total-conversion,michmerr/ingress-intel-total-conversion,kottt/ingress-intel-total-conversion,MNoelJones/ingress-intel-total-conversion,insane210/ingress-intel-total-conversion,nikolawannabe/ingress-intel-total-conversion,pleasantone/ingress-intel-total-conversion,michmerr/ingress-intel-total-conversion,insane210/ingress-intel-total-conversion,iitc-project/ingress-intel-total-conversion,ZasoGD/ingress-intel-total-conversion,3ch01c/ingress-intel-total-conversion
|
---
+++
@@ -1,11 +1,12 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
import glob
import time
import re
+import io
def readfile(fn):
- with open(fn, 'Ur', encoding='utf8') as f:
+ with io.open(fn, 'Ur', encoding='utf8') as f:
return f.read()
def loaderString(var):
@@ -29,7 +30,7 @@
m = re.sub('@@INCLUDERAW:([0-9a-zA-Z_./-]+)@@', loaderRaw, m)
m = re.sub('@@INCLUDESTRING:([0-9a-zA-Z_./-]+)@@', loaderString, m)
-with open('iitc-debug.user.js', 'w', encoding='utf8') as f:
+with io.open('iitc-debug.user.js', 'w', encoding='utf8') as f:
f.write(m)
# vim: ai si ts=4 sw=4 sts=4 et
|
037fcccebae10f608f5a2711fbbc659411d6879b
|
okdataset/context.py
|
okdataset/context.py
|
"""
DataSet context
"""
class DsContext(object):
def __init__(self, config="okdataset.yml"):
self.workers = 8
|
import yaml
import os
"""
DataSet context
"""
class Context(object):
def __init__(self, config=os.path.dirname(os.path.realpath(__file__)) + "/../okdataset.yml"):
self.workers = 8
self.config = yaml.load(open(config).read())
|
Put yaml config in Context.
|
Put yaml config in Context.
|
Python
|
mit
|
anthonyserious/okdataset,anthonyserious/okdataset
|
---
+++
@@ -1,6 +1,11 @@
+import yaml
+import os
+
"""
DataSet context
"""
-class DsContext(object):
- def __init__(self, config="okdataset.yml"):
+class Context(object):
+ def __init__(self, config=os.path.dirname(os.path.realpath(__file__)) + "/../okdataset.yml"):
self.workers = 8
+ self.config = yaml.load(open(config).read())
+
|
1697d267633f981b7a6e1a14b5e5b9b05f3b8179
|
imagersite/imager_images/tests.py
|
imagersite/imager_images/tests.py
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from imager_profile.models import ImagerProfile
from .models import Album, Photo
# Create your tests here.
fake = Faker()
class UserFactory(factory.django.DjangoModelFactory):
"""Create a fake user."""
class Meta:
model = User
username = factory.Sequence(lambda n: 'user{}'.format(n))
first_name = fake.first_name()
last_name = fake.last_name()
email = fake.email()
class PhotoFactory(factory.django.DjangoModelFactory):
"""Create a fake photo."""
class Meta:
model = Photo
photo = factory.django.ImageField()
title = fake.sentence()
description = fake.text()
class AlbumFactory(factory.django.DjangoModelFactory):
"""Create a fake album."""
class Meta:
model = Album
title = fake.sentence()
description = fake.text()
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.test import TestCase
import factory
from faker import Faker
from .models import Album, Photo
# Create your tests here.
fake = Faker()
class UserFactory(factory.django.DjangoModelFactory):
"""Create a fake user."""
class Meta:
model = User
username = factory.Sequence(lambda n: 'user{}'.format(n))
first_name = fake.first_name()
last_name = fake.last_name()
email = fake.email()
class PhotoFactory(factory.django.DjangoModelFactory):
"""Create a fake photo."""
class Meta:
model = Photo
photo = factory.django.ImageField()
title = fake.sentence()
description = fake.text()
user = factory.SubFactory(UserFactory)
class AlbumFactory(factory.django.DjangoModelFactory):
"""Create a fake album."""
class Meta:
model = Album
title = fake.sentence()
description = fake.text()
user = factory.SubFactory(UserFactory)
class PhotoTestCase(TestCase):
"""docstring for PhotoTestCase"""
@classmethod
def setUp(cls):
user = UserFactory()
user.set_password('secret')
user.save()
|
Fix user, album factories; add setup for photo test case
|
Fix user, album factories; add setup for photo test case
|
Python
|
mit
|
jesseklein406/django-imager,jesseklein406/django-imager,jesseklein406/django-imager
|
---
+++
@@ -4,7 +4,6 @@
import factory
from faker import Faker
-from imager_profile.models import ImagerProfile
from .models import Album, Photo
# Create your tests here.
@@ -30,6 +29,7 @@
photo = factory.django.ImageField()
title = fake.sentence()
description = fake.text()
+ user = factory.SubFactory(UserFactory)
class AlbumFactory(factory.django.DjangoModelFactory):
@@ -39,4 +39,14 @@
title = fake.sentence()
description = fake.text()
+ user = factory.SubFactory(UserFactory)
+
+class PhotoTestCase(TestCase):
+ """docstring for PhotoTestCase"""
+ @classmethod
+ def setUp(cls):
+ user = UserFactory()
+ user.set_password('secret')
+ user.save()
+
|
9b3f9b69efef93e5c56a0452ccff2e6195663c6b
|
imboclient/header/authenticate.py
|
imboclient/header/authenticate.py
|
import hmac, hashlib
class Authenticate:
def __init__(self, public_key, private_key, method, url, timestamp):
self._public_key = public_key
self._private_key = private_key
self.method = method
self.url = url
self.timestamp = timestamp
def _generate_auth_hash(self):
data = bytes(self.method + '|' + self.url + '|' + self._public_key + '|' + self.timestamp, 'utf-8')
return hmac.new(bytes(self._private_key, 'utf-8'), data, hashlib.sha256).hexdigest()
def headers(self):
signature = self._generate_auth_hash()
return {"Accept": "application/json", "X-Imbo-Authenticate-Signature": signature, "X-Imbo-Authenticate-Timestamp": self.timestamp}
|
import hmac
import hashlib
class Authenticate:
def __init__(self, public_key, private_key, method, url, timestamp):
self._public_key = public_key
self._private_key = private_key
self.method = method
self.url = url
self.timestamp = timestamp
def _generate_auth_hash(self):
data = bytes(self.method + '|' + self.url + '|' + self._public_key + '|' + self.timestamp, 'utf-8')
return hmac.new(bytes(self._private_key, 'utf-8'), data, hashlib.sha256).hexdigest()
def headers(self):
signature = self._generate_auth_hash()
return {"Accept": "application/json", "X-Imbo-Authenticate-Signature": signature, "X-Imbo-Authenticate-Timestamp": self.timestamp}
|
Clean up PEP8 inconsistencies in header classes
|
Clean up PEP8 inconsistencies in header classes
|
Python
|
mit
|
imbo/imboclient-python,imbo/imboclient-python,imbo/imboclient-python
|
---
+++
@@ -1,4 +1,6 @@
-import hmac, hashlib
+import hmac
+import hashlib
+
class Authenticate:
def __init__(self, public_key, private_key, method, url, timestamp):
@@ -16,4 +18,3 @@
def headers(self):
signature = self._generate_auth_hash()
return {"Accept": "application/json", "X-Imbo-Authenticate-Signature": signature, "X-Imbo-Authenticate-Timestamp": self.timestamp}
-
|
9537cb765776135bc6d2777b6f4f931724edea7f
|
project_name/project_name/urls.py
|
project_name/project_name/urls.py
|
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
import django.contrib.auth.views.login
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', django.contrib.auth.views.login),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Include django.contrib.auth.views.login in django 1.10 way
|
Include django.contrib.auth.views.login in django 1.10 way
|
Python
|
mit
|
tom-henderson/django-template,tom-henderson/django-template
|
---
+++
@@ -5,12 +5,13 @@
from django.conf import settings
import views
+import django.contrib.auth.views.login
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
- url(r'^login/$', 'django.contrib.auth.views.login'),
+ url(r'^login/$', django.contrib.auth.views.login),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
]
|
a0a4ba94cc76d5c4395d869fe5ea70caae14fa36
|
pyroSAR/tests/test_snap_exe.py
|
pyroSAR/tests/test_snap_exe.py
|
import pytest
from contextlib import contextmanager
from pyroSAR._dev_config import ExamineExe
from pyroSAR.snap.auxil import ExamineSnap
@contextmanager
def not_raises(ExpectedException):
try:
yield
except ExpectedException:
raise AssertionError(
"Did raise exception {0} when it should not!".format(
repr(ExpectedException)
)
)
except Exception:
raise AssertionError(
"An unexpected exception {0} raised.".format(repr(Exception))
)
class TestExemineExe:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
def test_warn_snap(self):
with pytest.warns(UserWarning):
ExamineExe.examine('snap')
# def test_not_exception(self):
# SNAP_EXECUTABLE = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
# with not_raises(ValueError):
# ExamineExe.examine(SNAP_EXECUTABLE)
class TestExamineSnap:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
# def test_not_exception(self):
# with not_raises(AssertionError):
# test_snap_exe = ExamineSnap()
|
from contextlib import contextmanager
import pytest
from pyroSAR._dev_config import ExamineExe
from pyroSAR.snap.auxil import ExamineSnap
@contextmanager
def not_raises(ExpectedException):
try:
yield
except ExpectedException:
raise AssertionError(
"Did raise exception {0} when it should not!".format(
repr(ExpectedException)
)
)
except Exception:
raise AssertionError(
"An unexpected exception {0} raised.".format(repr(Exception))
)
class TestExemineExe:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
def test_not_exception(self):
SNAP_EXECUTABLE = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
with pytest.warns(None) as record:
ExamineExe.examine(SNAP_EXECUTABLE)
assert len(record) == 1
class TestExamineSnap:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineSnap(snap_executable='some_exe_file.exe')
def test_not_exception(self):
with pytest.warns(None) as record:
ExamineSnap()
assert len(record) == 0
|
Add unit test to determine if the classes ExamineExe and ExamineSnap will work properly.
|
Add unit test to determine if the classes ExamineExe and ExamineSnap will work properly.
|
Python
|
mit
|
johntruckenbrodt/pyroSAR,johntruckenbrodt/pyroSAR
|
---
+++
@@ -1,7 +1,10 @@
+from contextlib import contextmanager
+
import pytest
-from contextlib import contextmanager
+
from pyroSAR._dev_config import ExamineExe
from pyroSAR.snap.auxil import ExamineSnap
+
@contextmanager
def not_raises(ExpectedException):
@@ -20,24 +23,25 @@
"An unexpected exception {0} raised.".format(repr(Exception))
)
+
class TestExemineExe:
def test_exception(self):
with pytest.warns(UserWarning):
ExamineExe.examine('some_exe_file.exe')
- def test_warn_snap(self):
- with pytest.warns(UserWarning):
- ExamineExe.examine('snap')
- # def test_not_exception(self):
- # SNAP_EXECUTABLE = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
- # with not_raises(ValueError):
- # ExamineExe.examine(SNAP_EXECUTABLE)
+ def test_not_exception(self):
+ SNAP_EXECUTABLE = ['snap64.exe', 'snap32.exe', 'snap.exe', 'snap']
+ with pytest.warns(None) as record:
+ ExamineExe.examine(SNAP_EXECUTABLE)
+ assert len(record) == 1
+
class TestExamineSnap:
def test_exception(self):
with pytest.warns(UserWarning):
- ExamineExe.examine('some_exe_file.exe')
+ ExamineSnap(snap_executable='some_exe_file.exe')
- # def test_not_exception(self):
- # with not_raises(AssertionError):
- # test_snap_exe = ExamineSnap()
+ def test_not_exception(self):
+ with pytest.warns(None) as record:
+ ExamineSnap()
+ assert len(record) == 0
|
584bfedf9c71bad0715b9d167c3e90ec588d5110
|
pydarkstar/scrubbing/scrubber.py
|
pydarkstar/scrubbing/scrubber.py
|
from ..darkobject import DarkObject
from bs4 import BeautifulSoup
import requests
import logging
import time
class Scrubber(DarkObject):
def __init__(self):
super(Scrubber, self).__init__()
def scrub(self):
"""
Get item metadata.
"""
return {}
# noinspection PyBroadException
@staticmethod
def soup(url, absolute: bool = False, **kwargs):
"""
Open URL and create tag soup.
:param url: website string
:type url: str
:param absolute: perform double get request to find absolute url
:type absolute: bool
"""
handle = ''
max_tries = 10
for i in range(max_tries):
# noinspection PyPep8
try:
if absolute:
url = requests.get(url).url
handle = requests.get(url, params=kwargs).text
break
except Exception:
logging.exception('urlopen failed (attempt %d)', i + 1)
if i == max_tries - 1:
logging.error('the maximum urlopen attempts have been reached')
raise
time.sleep(1)
s = BeautifulSoup(handle, features='html5lib')
return s
if __name__ == '__main__':
pass
|
from ..darkobject import DarkObject
from bs4 import BeautifulSoup
import requests
import logging
import time
import bs4
class Scrubber(DarkObject):
def __init__(self):
super(Scrubber, self).__init__()
def scrub(self):
"""
Get item metadata.
"""
return {}
# noinspection PyBroadException
@staticmethod
def soup(url, absolute: bool = False, **kwargs):
"""
Open URL and create tag soup.
:param url: website string
:type url: str
:param absolute: perform double get request to find absolute url
:type absolute: bool
"""
handle = ''
max_tries = 10
for i in range(max_tries):
# noinspection PyPep8
try:
if absolute:
url = requests.get(url).url
handle = requests.get(url, params=kwargs).text
break
except Exception:
logging.exception('urlopen failed (attempt %d)', i + 1)
if i == max_tries - 1:
logging.error('the maximum urlopen attempts have been reached')
raise
time.sleep(1)
try:
s = BeautifulSoup(handle, features='html5lib')
except bs4.FeatureNotFound:
s = BeautifulSoup(handle, features='html.parser')
return s
if __name__ == '__main__':
pass
|
Add try accept block for bs4 features
|
Add try accept block for bs4 features
|
Python
|
mit
|
AdamGagorik/pydarkstar
|
---
+++
@@ -3,6 +3,7 @@
import requests
import logging
import time
+import bs4
class Scrubber(DarkObject):
@@ -43,7 +44,11 @@
raise
time.sleep(1)
- s = BeautifulSoup(handle, features='html5lib')
+ try:
+ s = BeautifulSoup(handle, features='html5lib')
+ except bs4.FeatureNotFound:
+ s = BeautifulSoup(handle, features='html.parser')
+
return s
|
3bceae5afd3158e98e76dd0e228efc4d1396a433
|
marvin/__init__.py
|
marvin/__init__.py
|
"""
marvin
~~~~~~
This is the main entry point to marvin, the API endpoints for streamr.
"""
# pylint: disable=invalid-name
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.restful import Api
from os import path
db = SQLAlchemy()
api = Api()
def create_app(config_file=None, **extra_config):
""" Creates a WSGI app.
:param config_file: Load config from this file.
:param extra_config: Extra configuration values to pass to the WSGI object.
"""
core_settings = path.join(path.dirname(__file__), 'settings.py')
# Setup app configuration
app = Flask(__name__)
app.config.from_pyfile(core_settings)
if config_file is not None:
app.config.from_pyfile(config_file)
app.config.update(extra_config)
# Connect extensions
db.init_app(app)
api.init_app(app)
# Import views (must be done down here to avoid circular imports)
from .views import movies
# Register resources
api.add_resource(movies.MovieView, '/movies/<int:movie_id>')
api.add_resource(movies.AllMoviesView, '/movies')
return app
def init_db(app):
""" Create the database with all tables for the given app. """
with app.test_request_context():
db.create_all()
|
"""
marvin
~~~~~~
This is the main entry point to marvin, the API endpoints for streamr.
"""
# pylint: disable=invalid-name
from flask import Flask, make_response
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.restful import Api
from os import path
import ujson
db = SQLAlchemy()
api = Api()
@api.representation('application/json')
def _fastjson(data, code, headers=None):
response = make_response(ujson.dumps(data), code)
response.headers.extend(headers or {})
return response
def create_app(config_file=None, **extra_config):
""" Creates a WSGI app.
:param config_file: Load config from this file.
:param extra_config: Extra configuration values to pass to the WSGI object.
"""
core_settings = path.join(path.dirname(__file__), 'settings.py')
# Setup app configuration
app = Flask(__name__)
app.config.from_pyfile(core_settings)
if config_file is not None:
app.config.from_pyfile(config_file)
app.config.update(extra_config)
# Connect extensions
db.init_app(app)
api.init_app(app)
# Import views (must be done down here to avoid circular imports)
from .views import movies
# Register resources
api.add_resource(movies.MovieView, '/movies/<int:movie_id>')
api.add_resource(movies.AllMoviesView, '/movies')
return app
def init_db(app):
""" Create the database with all tables for the given app. """
with app.test_request_context():
db.create_all()
|
Use ujson for encoding responses.
|
Use ujson for encoding responses.
|
Python
|
mit
|
streamr/marvin,streamr/marvin,streamr/marvin
|
---
+++
@@ -7,13 +7,21 @@
# pylint: disable=invalid-name
-from flask import Flask
+from flask import Flask, make_response
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.restful import Api
from os import path
+import ujson
+
db = SQLAlchemy()
api = Api()
+
+@api.representation('application/json')
+def _fastjson(data, code, headers=None):
+ response = make_response(ujson.dumps(data), code)
+ response.headers.extend(headers or {})
+ return response
def create_app(config_file=None, **extra_config):
""" Creates a WSGI app.
|
e096343aaaa916232633543d57431b7f3022215a
|
awscfncli/__main__.py
|
awscfncli/__main__.py
|
# -*- encoding: utf-8 -*-
__author__ = 'kotaimen'
__date__ = '28-Feb-2018'
"""Main cli entry point, called when awscfncli is run as a package,
imported in setuptools intergration.
cli package stucture:
Click main entry:
cli/main.py
Command groups:
cli/group_named/__init__.py
Subcommands:
cli/group_name/command_name.py
All commands are imported in cli/__init__.py to get registered into click.
"""
from .cli import cfn_cli
def main():
cfn_cli()
if __name__ == '__main__':
main()
|
# -*- encoding: utf-8 -*-
__author__ = 'kotaimen'
__date__ = '28-Feb-2018'
"""Main cli entry point, called when awscfncli is run as a package,
imported in setuptools intergration.
cli package stucture:
Click main entry:
cli/main.py
Command groups:
cli/group_named/__init__.py
Subcommands:
cli/group_name/command_name.py
All commands are imported in cli/__init__.py to get registered into click.
"""
from .cli import cfn_cli
def main():
cfn_cli(
auto_envvar_prefix='CFN'
)
if __name__ == '__main__':
main()
|
Add click automatic environment variable prefix.
|
Add click automatic environment variable prefix.
|
Python
|
mit
|
Kotaimen/awscfncli,Kotaimen/awscfncli
|
---
+++
@@ -24,7 +24,9 @@
def main():
- cfn_cli()
+ cfn_cli(
+ auto_envvar_prefix='CFN'
+ )
if __name__ == '__main__':
|
9684af3ecb0fd26deef87e2509a5de892a62e5f1
|
cloudsizzle/studyplanner/courselist/views.py
|
cloudsizzle/studyplanner/courselist/views.py
|
# Create your views here.
from django.shortcuts import render_to_response
from studyplanner.courselist.models import Course
from studyplanner.courselist.models import Faculty
from studyplanner.courselist.models import Department
def list_courses(request, faculty, department):
department = Department.objects.get(slug=department)
courses = department.courses.all()
return render_to_response('list_courses.html', {
'department': department, 'courses': courses})
def list_faculties(request):
faculties = Faculty.objects.all()
return render_to_response('list_faculties.html', {'faculties': faculties})
def list_departments(request, faculty):
faculty = Faculty.objects.get(slug=faculty)
departments = faculty.departments.all()
return render_to_response('list_departments.html', {
'faculty': faculty, 'departments': departments})
def show_course(request, faculty, department, course):
faculty = Faculty.objects.get(slug=faculty)
department = Department.objects.get(slug=department)
course = Course.objects.get(slug=course)
return render_to_response('show_course.html', {
'faculty': faculty, 'department': department, 'course': course})
|
# Create your views here.
from django.shortcuts import render_to_response
from studyplanner.courselist.models import Course
from studyplanner.courselist.models import Faculty
from studyplanner.courselist.models import Department
def list_courses(request, faculty, department):
department = Department.objects.get(slug=department)
courses = department.courses.all()
return render_to_response('list_courses.html', {'user': request.user,
'department': department, 'courses': courses})
def list_faculties(request):
faculties = Faculty.objects.all()
return render_to_response('list_faculties.html', {'user': request.user,
'faculties': faculties})
def list_departments(request, faculty):
faculty = Faculty.objects.get(slug=faculty)
departments = faculty.departments.all()
return render_to_response('list_departments.html', {'user': request.user,
'faculty': faculty, 'departments': departments})
def show_course(request, faculty, department, course):
faculty = Faculty.objects.get(slug=faculty)
department = Department.objects.get(slug=department)
course = Course.objects.get(slug=course)
return render_to_response('show_course.html', {'user': request.user,
'faculty': faculty, 'department': department, 'course': course})
|
Add user object to template context.
|
Add user object to template context.
|
Python
|
mit
|
jpvanhal/cloudsizzle,jpvanhal/cloudsizzle
|
---
+++
@@ -7,22 +7,23 @@
def list_courses(request, faculty, department):
department = Department.objects.get(slug=department)
courses = department.courses.all()
- return render_to_response('list_courses.html', {
+ return render_to_response('list_courses.html', {'user': request.user,
'department': department, 'courses': courses})
def list_faculties(request):
faculties = Faculty.objects.all()
- return render_to_response('list_faculties.html', {'faculties': faculties})
+ return render_to_response('list_faculties.html', {'user': request.user,
+ 'faculties': faculties})
def list_departments(request, faculty):
faculty = Faculty.objects.get(slug=faculty)
departments = faculty.departments.all()
- return render_to_response('list_departments.html', {
+ return render_to_response('list_departments.html', {'user': request.user,
'faculty': faculty, 'departments': departments})
def show_course(request, faculty, department, course):
faculty = Faculty.objects.get(slug=faculty)
department = Department.objects.get(slug=department)
course = Course.objects.get(slug=course)
- return render_to_response('show_course.html', {
+ return render_to_response('show_course.html', {'user': request.user,
'faculty': faculty, 'department': department, 'course': course})
|
c6c189ffe13f88d7310291c785ffe363f6c04423
|
trayapp.py
|
trayapp.py
|
# Github Tray App
import rumps
import config
import contribs
class GithubTrayApp(rumps.App):
def __init__(self):
super(GithubTrayApp, self).__init__('Github')
self.count = rumps.MenuItem('commits')
self.username = config.get_username()
self.menu = [
self.count,
'Update Now',
'Change Frequency',
'Change Username'
]
self.update()
def update(self):
try:
num = str(contribs.get_contribs(self.username))
self.icon = 'github0.png' if num == '0' else 'github.png'
self.count.title = num + ' commits'
except Exception as e: print(e)
@rumps.timer(60*5)
def timer(self, _):
print('Running timer')
self.update()
@rumps.clicked('Update Now')
def update_now(self, _):
self.update()
@rumps.clicked('Change Frequency')
def change_frequency(_):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def change_username(_):
rumps.alert('jk! not ready yet!')
if __name__ == '__main__':
GithubTrayApp().run()
|
# Github Tray App
import rumps
import config
import contribs
class GithubTrayApp(rumps.App):
def __init__(self):
super(GithubTrayApp, self).__init__('Github')
self.count = rumps.MenuItem('commits')
self.username = config.get_username()
self.menu = [
self.count,
'Update Now',
'Change Frequency',
'Change Username'
]
self.update()
def update(self):
try:
print('Updating user')
num = str(contribs.get_contribs(self.username))
self.icon = 'github0.png' if num == '0' else 'github.png'
self.count.title = num + ' commits'
except Exception as e: print(e)
@rumps.timer(60*5)
def timer(self, _):
self.update()
@rumps.clicked('Update Now')
def update_now(self, _):
self.update()
@rumps.clicked('Change Frequency')
def change_frequency(_):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def change_username(_):
rumps.alert('jk! not ready yet!')
if __name__ == '__main__':
GithubTrayApp().run()
|
Update in the update function
|
Update in the update function
|
Python
|
mit
|
chrisfosterelli/commitwatch
|
---
+++
@@ -21,6 +21,7 @@
def update(self):
try:
+ print('Updating user')
num = str(contribs.get_contribs(self.username))
self.icon = 'github0.png' if num == '0' else 'github.png'
self.count.title = num + ' commits'
@@ -28,7 +29,6 @@
@rumps.timer(60*5)
def timer(self, _):
- print('Running timer')
self.update()
@rumps.clicked('Update Now')
|
63e31d0594f3813a6d49191d94c4ec7ce6b66e3b
|
metatlas/tests/test_loader.py
|
metatlas/tests/test_loader.py
|
from __future__ import print_function
import tables
from metatlas.mzml_loader import mzml_to_hdf, get_test_data
def test_loader():
path = get_test_data()
out_file = 'test_loader.h5'
mzml_to_hdf(path, out_file_name=out_file)
fid = tables.open_file(out_file)
table = fid.root.spectra
assert table.nrows == 933367
assert table[0][0] == 59.01387023925781
assert table[-1][0] == 1666.9520263671875
scan_time = [y['scan_time'] for y in table.where('(ms_level==1)')]
assert len(scan_time) == 5082
|
from __future__ import print_function
import tables
from metatlas.mzml_loader import mzml_to_hdf, get_test_data
def test_loader():
path = get_test_data()
out_file = 'test_loader.h5'
mzml_to_hdf(path, out_file_name=out_file)
fid = tables.open_file(out_file)
table = fid.root.spectra
assert table.nrows == 933367
assert table[0][0] == 59.01387023925781
assert table[-1][0] == 1666.9520263671875
scan_time = [y['scan_time'] for y in table.where('(ms_level==1)')]
assert len(scan_time) == 933367
|
Fix the query test now that the hdf file order is fixed
|
Fix the query test now that the hdf file order is fixed
|
Python
|
bsd-3-clause
|
metabolite-atlas/metatlas,metabolite-atlas/metatlas,metabolite-atlas/metatlas,aitatanit/metatlas,aitatanit/metatlas,biorack/metatlas,aitatanit/metatlas,biorack/metatlas
|
---
+++
@@ -17,4 +17,4 @@
assert table[0][0] == 59.01387023925781
assert table[-1][0] == 1666.9520263671875
scan_time = [y['scan_time'] for y in table.where('(ms_level==1)')]
- assert len(scan_time) == 5082
+ assert len(scan_time) == 933367
|
89c97e1923ff481d82d7c6474874633a346ded1d
|
sigmapiweb/apps/UserInfo/admin.py
|
sigmapiweb/apps/UserInfo/admin.py
|
"""
Admin config for UserInfo app.
"""
from django.contrib import admin
from common.utils import register_model_admins
from .models import PledgeClass, UserInfo
class UserInfoAdmin(admin.ModelAdmin):
"""
Class to represent the user info for an Admin
"""
search_fields = ['user__first_name', 'user__last_name']
list_display = tuple([
field.name
for field in UserInfo._meta.fields
if field.name not in UserInfo.admin_display_excluded_fields
])
admin.site.register(UserInfo, UserInfoAdmin)
register_model_admins(
PledgeClass,
)
|
"""
Admin config for UserInfo app.
"""
from django.contrib import admin
from common.utils import register_model_admins
from .models import PledgeClass, UserInfo
class UserInfoAdmin(admin.ModelAdmin):
"""
Class to represent the user info for an Admin
"""
search_fields = ['user__first_name', 'user__last_name']
list_display = tuple([
field.name
for field in UserInfo._meta.fields
if field.name not in UserInfo.admin_display_excluded_fields
])
admin.site.register(UserInfo, UserInfoAdmin)
register_model_admins(
PledgeClass,
)
|
Fix quality violation in UserInfo
|
Fix quality violation in UserInfo
|
Python
|
mit
|
sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web,sigmapi-gammaiota/sigmapi-web
|
---
+++
@@ -16,7 +16,8 @@
field.name
for field in UserInfo._meta.fields
if field.name not in UserInfo.admin_display_excluded_fields
- ])
+ ])
+
admin.site.register(UserInfo, UserInfoAdmin)
|
fc67921095b85fc021482417415d935e8de55525
|
chatexchange6/_logging26backport.py
|
chatexchange6/_logging26backport.py
|
import logging
from logging import *
class Logger26(logging.getLoggerClass()):
def getChild(self, suffix):
"""
(copied from module "logging" for Python 3.4)
Get a logger which is a descendant to this one.
This is a convenience method, such that
logging.getLogger('abc').getChild('def.ghi')
is the same as
logging.getLogger('abc.def.ghi')
It's useful, for example, when the parent logger is named using
__name__ rather than a literal string.
"""
if self.root is not self:
suffix = '.'.join((self.name, suffix))
return self.manager.getLogger(suffix)
logging.setLoggerClass(Logger26)
|
import logging
from logging import *
class Logger(logging.Logger):
def getChild(self, suffix):
"""
(copied from module "logging" for Python 3.4)
Get a logger which is a descendant to this one.
This is a convenience method, such that
logging.getLogger('abc').getChild('def.ghi')
is the same as
logging.getLogger('abc.def.ghi')
It's useful, for example, when the parent logger is named using
__name__ rather than a literal string.
"""
if self.root is not self:
suffix = '.'.join((self.name, suffix))
return self.manager.getLogger(suffix)
|
Implement backport of logging package for 2.6
|
Implement backport of logging package for 2.6
debugging 3
|
Python
|
apache-2.0
|
Charcoal-SE/ChatExchange,Charcoal-SE/ChatExchange,ByteCommander/ChatExchange6,ByteCommander/ChatExchange6
|
---
+++
@@ -2,7 +2,7 @@
from logging import *
-class Logger26(logging.getLoggerClass()):
+class Logger(logging.Logger):
def getChild(self, suffix):
"""
@@ -26,4 +26,3 @@
return self.manager.getLogger(suffix)
-logging.setLoggerClass(Logger26)
|
19ee49c57fd17f14efffd946019734f1cb4ed18e
|
pipes/s3/__main__.py
|
pipes/s3/__main__.py
|
"""Add application.properties to Application's S3 Bucket directory."""
import logging
import argparse
from .create_archaius import init_properties
LOG = logging.getLogger(__name__)
def main():
"""Create application.properties for a given application."""
logging.basicConfig()
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument('-d',
'--debug',
action='store_const',
const=logging.DEBUG,
default=logging.INFO,
help='Set DEBUG output')
parser.add_argument('-e',
'--env',
choices=('dev', 'stage', 'prod'),
default='dev',
help='Deploy environment')
parser.add_argument('-g',
'--group',
default='extra',
help='Application Group name, e.g. forrest')
parser.add_argument('-a',
'--app',
default='unnecessary',
help='Application name, e.g. forrestcore')
args = parser.parse_args()
LOG.setLevel(args.debug)
logging.getLogger(__package__).setLevel(args.debug)
vars(args).pop('debug')
LOG.debug('Args: %s', vars(args))
init_properties(env=args.env, group=args.group, app=args.app)
if __name__ == '__main__':
main()
|
"""Add application.properties to Application's S3 Bucket directory."""
import logging
import argparse
from .create_archaius import init_properties
LOG = logging.getLogger(__name__)
def main():
"""Create application.properties for a given application."""
logging.basicConfig()
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument('-d',
'--debug',
action='store_const',
const=logging.DEBUG,
default=logging.INFO,
help='Set DEBUG output')
parser.add_argument('-e',
'--env',
choices=('build', 'dev', 'stage', 'prod'),
default='dev',
help='Deploy environment')
parser.add_argument('-g',
'--group',
default='extra',
help='Application Group name, e.g. forrest')
parser.add_argument('-a',
'--app',
default='unnecessary',
help='Application name, e.g. forrestcore')
args = parser.parse_args()
LOG.setLevel(args.debug)
logging.getLogger(__package__).setLevel(args.debug)
vars(args).pop('debug')
LOG.debug('Args: %s', vars(args))
init_properties(env=args.env, group=args.group, app=args.app)
if __name__ == '__main__':
main()
|
Add build as a possible environment option
|
Add build as a possible environment option
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
---
+++
@@ -19,7 +19,7 @@
help='Set DEBUG output')
parser.add_argument('-e',
'--env',
- choices=('dev', 'stage', 'prod'),
+ choices=('build', 'dev', 'stage', 'prod'),
default='dev',
help='Deploy environment')
parser.add_argument('-g',
|
c7b7f3709fc958a2c694858c65a379cfcd54f72b
|
myflaskapp/tests/test_unit.py
|
myflaskapp/tests/test_unit.py
|
import unittest
import requests
class SmokeTest(unittest.TestCase):
def test_maths(self):
self.assertEquals(6, 2 + 4)
def test_home_page_is_about_todo_lists(self):
request = requests.get('http://localhost:5000')
self.assertTrue(
request.content.startswith(bytes('\n\n<!doctype html>\n', 'utf-8')))
self.assertIn(
'<title>\n \n tdd_with_python\n \n \n </title>\n',
request.text)
self.assertTrue(
request.content.endswith(bytes('</body>\n</html>\n', 'utf-8')))
class TestMainPage:
"""WebTest test for title"""
def test_main_page_returns_200(self, user, testapp):
"""Login successful."""
# Goes to homepage
res = testapp.get('/')
assert res.status_code == 200
def test_main_page_returns_expected_title(self, user, testapp):
res = testapp.get('/')
assert '<title>\n \n tdd_with_python\n \n \n </title>\n' in res
|
import unittest
import requests
class SmokeTest(unittest.TestCase):
def test_maths(self):
self.assertEquals(6, 2 + 4)
def test_home_page_is_about_todo_lists(self):
request = requests.get('http://localhost:5000')
self.assertTrue(
request.content.startswith(b'\n\n<!doctype html>\n'))
self.assertIn(
'<title>\n \n tdd_with_python\n \n \n </title>\n',
request.text)
self.assertTrue(request.content.endswith(b'</body>\n</html>\n'))
class TestMainPage:
"""WebTest test for title"""
def test_main_page_returns_200(self, user, testapp):
"""Login successful."""
# Goes to homepage
res = testapp.get('/')
assert res.status_code == 200
def test_main_page_returns_expected_title(self, user, testapp):
res = testapp.get('/')
assert '<title>\n \n tdd_with_python\n \n \n </title>\n' in res
# def test_main_page_returns_expected_content(self, user, testapp):
# res = testapp.get('/')
|
Use less verbose byte string syntax
|
Use less verbose byte string syntax
|
Python
|
mit
|
terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python,terryjbates/test-driven-development-with-python
|
---
+++
@@ -9,13 +9,11 @@
def test_home_page_is_about_todo_lists(self):
request = requests.get('http://localhost:5000')
self.assertTrue(
- request.content.startswith(bytes('\n\n<!doctype html>\n', 'utf-8')))
+ request.content.startswith(b'\n\n<!doctype html>\n'))
self.assertIn(
'<title>\n \n tdd_with_python\n \n \n </title>\n',
request.text)
- self.assertTrue(
- request.content.endswith(bytes('</body>\n</html>\n', 'utf-8')))
-
+ self.assertTrue(request.content.endswith(b'</body>\n</html>\n'))
class TestMainPage:
"""WebTest test for title"""
@@ -29,3 +27,6 @@
def test_main_page_returns_expected_title(self, user, testapp):
res = testapp.get('/')
assert '<title>\n \n tdd_with_python\n \n \n </title>\n' in res
+
+ # def test_main_page_returns_expected_content(self, user, testapp):
+ # res = testapp.get('/')
|
1a2527afdc5cb9c948ac74a9925d90709d6150cc
|
seleniumbase/fixtures/constants.py
|
seleniumbase/fixtures/constants.py
|
"""
This class containts some frequently-used constants
"""
class Environment:
QA = "qa"
STAGING = "staging"
PRODUCTION = "production"
MASTER = "master"
LOCAL = "local"
TEST = "test"
class Files:
DOWNLOADS_FOLDER = "downloaded_files"
ARCHIVED_DOWNLOADS_FOLDER = "archived_files"
class ValidBrowsers:
valid_browsers = ["firefox", "ie", "edge", "safari", "chrome", "phantomjs"]
class Browser:
FIREFOX = "firefox"
INTERNET_EXPLORER = "ie"
EDGE = "edge"
SAFARI = "safari"
GOOGLE_CHROME = "chrome"
PHANTOM_JS = "phantomjs"
HTML_UNIT = "htmlunit"
VERSION = {
"firefox": None,
"ie": None,
"edge": None,
"safari": None,
"chrome": None,
"phantomjs": None,
"htmlunit": None
}
LATEST = {
"firefox": None,
"ie": None,
"edge": None,
"safari": None,
"chrome": None,
"phantomjs": None,
"htmlunit": None
}
class State:
NOTRUN = "NotRun"
ERROR = "Error"
FAILURE = "Fail"
PASS = "Pass"
SKIP = "Skip"
BLOCKED = "Blocked"
DEPRECATED = "Deprecated"
|
"""
This class containts some frequently-used constants
"""
class Environment:
QA = "qa"
STAGING = "staging"
PRODUCTION = "production"
MASTER = "master"
LOCAL = "local"
TEST = "test"
class Files:
DOWNLOADS_FOLDER = "downloaded_files"
ARCHIVED_DOWNLOADS_FOLDER = "archived_files"
class ValidBrowsers:
valid_browsers = ["firefox", "ie", "edge", "safari", "chrome", "phantomjs"]
class Browser:
FIREFOX = "firefox"
INTERNET_EXPLORER = "ie"
EDGE = "edge"
SAFARI = "safari"
GOOGLE_CHROME = "chrome"
PHANTOM_JS = "phantomjs"
VERSION = {
"firefox": None,
"ie": None,
"edge": None,
"safari": None,
"chrome": None,
"phantomjs": None
}
LATEST = {
"firefox": None,
"ie": None,
"edge": None,
"safari": None,
"chrome": None,
"phantomjs": None
}
class State:
NOTRUN = "NotRun"
ERROR = "Error"
FAILURE = "Fail"
PASS = "Pass"
SKIP = "Skip"
BLOCKED = "Blocked"
DEPRECATED = "Deprecated"
|
Remove "htmlunit" from browser options
|
Remove "htmlunit" from browser options
|
Python
|
mit
|
mdmintz/SeleniumBase,mdmintz/seleniumspot,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase
|
---
+++
@@ -28,7 +28,6 @@
SAFARI = "safari"
GOOGLE_CHROME = "chrome"
PHANTOM_JS = "phantomjs"
- HTML_UNIT = "htmlunit"
VERSION = {
"firefox": None,
@@ -36,8 +35,7 @@
"edge": None,
"safari": None,
"chrome": None,
- "phantomjs": None,
- "htmlunit": None
+ "phantomjs": None
}
LATEST = {
@@ -46,8 +44,7 @@
"edge": None,
"safari": None,
"chrome": None,
- "phantomjs": None,
- "htmlunit": None
+ "phantomjs": None
}
|
6ca85774de9532676b784646ecad3ef62b09ed5e
|
scripts/settings/fieldnames.py
|
scripts/settings/fieldnames.py
|
# Hexagons
HexID = 'HexID'
# ProviderServiceAreas
ServiceClass = 'ServiceClass'
ProvName = 'ProvName'
#Providers
Colloquial = 'Colloquial'
Code = 'Code'
URL = 'URL'
#Utilities
PROVIDER = 'PROVIDER'
WEBLINK = 'WEBLINK'
#Roads
CARTOCODE = 'CARTOCODE'
FULLNAME = 'FULLNAME'
#Drive Times
ToBreak = 'ToBreak'
Name = 'Name'
#BB
UTProvCode = 'UTProvCode'
|
# Hexagons
HexID = 'HexID'
# ProviderServiceAreas
ServiceClass = 'ServiceClass'
ProvName = 'ProvName'
#Providers
Colloquial = 'Colloquial'
Code = 'Code'
URL = 'URL'
#Utilities
PROVIDER = 'PROVIDER'
WEBLINK = 'WEBLINK'
#Roads
CARTOCODE = 'CARTOCODE'
FULLNAME = 'HWYNAME'
#Drive Times
ToBreak = 'ToBreak'
Name = 'Name'
#BB
UTProvCode = 'UTProvCode'
|
Use HWYNAME for road names in report
|
Use HWYNAME for road names in report
Related to #36
|
Python
|
mit
|
agrc/bb-econ,agrc/bb-econ,agrc/bb-econ
|
---
+++
@@ -16,7 +16,7 @@
#Roads
CARTOCODE = 'CARTOCODE'
-FULLNAME = 'FULLNAME'
+FULLNAME = 'HWYNAME'
#Drive Times
ToBreak = 'ToBreak'
|
020c13f3b39d495d50704317fe12ee4a4e735bb4
|
kylin/_injector.py
|
kylin/_injector.py
|
from functools import wraps
from typing import Callable
from ._scope import Scope
class Injector(Callable):
"""
class decorator to inject dependencies into a callable decorated function
"""
def __init__(self, dependencies: dict, fun: Callable):
self.dependencies = dependencies
self.fun = fun
@property
def scope(self) -> Scope:
return Scope()
def __call__(self, *args, **kwargs):
injections = {}
for dependency_name, service_name in self.dependencies.items():
injections[dependency_name] = kwargs.get(dependency_name) or self.scope[service_name]
kwargs.update(injections)
return self.fun(*args, **kwargs)
class Inject(Callable):
"""
class to recive the callable dependencies
"""
__injector__ = Injector
def __init__(self, **dependencies):
self.dependencies = dependencies
def __call__(self, fun: Callable):
return wraps(fun).__call__(self.__injector__(self.dependencies, fun))
|
from functools import wraps
from typing import Callable
from ._scope import Scope
class Injector(Callable):
"""
class decorator to inject dependencies into a callable decorated function
"""
def __init__(self, dependencies: dict, fun: Callable):
self.dependencies = dependencies
self.fun = fun
@property
def scope(self) -> Scope:
return Scope()
def __call__(self, *args, **kwargs):
injections = {}
for dependency_name, service_name in self.dependencies.items():
injections[dependency_name] = kwargs.get(dependency_name) or self.scope[service_name]
kwargs.update(injections)
return self.fun(*args, **kwargs)
class Inject(Callable):
"""
class to recive the callable dependencies
"""
__injector__ = Injector
def __init__(self, **dependencies):
self.dependencies = dependencies
def __call__(self, fun: Callable):
def call(*args, **kwargs):
return self.__injector__(self.dependencies, fun).__call__(*args, **kwargs)
return wraps(fun).__call__(call)
|
Fix bug of self injection into injector function
|
Fix bug of self injection into injector function
|
Python
|
mit
|
WatanukiRasadar/kylin
|
---
+++
@@ -35,4 +35,6 @@
self.dependencies = dependencies
def __call__(self, fun: Callable):
- return wraps(fun).__call__(self.__injector__(self.dependencies, fun))
+ def call(*args, **kwargs):
+ return self.__injector__(self.dependencies, fun).__call__(*args, **kwargs)
+ return wraps(fun).__call__(call)
|
291da4afa9f359dc4cfda6a683afdcead39d557b
|
simiki/conf_templates/fabfile.py
|
simiki/conf_templates/fabfile.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import os.path
from fabric.api import env, local, run
from fabric.colors import blue
import fabric.contrib.project as project
# Remote host and username
env.hosts = []
env.user = ""
env.colorize_errors = True
# Local output path
env.local_output = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
"output/")
# Remote path to deploy output
env.remote_output = ""
def update_simiki():
print(blue("Old Version: "))
run("simiki -V")
run("pip install -U simiki")
print(blue("New Version: "))
run("simiki -V")
def deploy():
project.rsync_project(
local_dir = env.local_output,
remote_dir = env.remote_output.rstrip("/") + "/",
delete =True
)
def g():
local("simiki generate")
def p():
local("simiki preview")
def gp():
g()
p()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import os.path
from sys import exit
from fabric.api import env, local, run
from fabric.colors import blue, red
import fabric.contrib.project as project
# Remote host and username
env.hosts = []
env.user = ""
env.colorize_errors = True
# Local output path
env.local_output = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
"output/")
# Remote path to deploy output
env.remote_output = ""
# Other options
env.rsync_delete = False
def update_simiki():
print(blue("Old Version: "))
run("simiki -V")
run("pip install -U simiki")
print(blue("New Version: "))
run("simiki -V")
def deploy():
if not env.remote_output:
if env.rsync_delete:
print(red("You can't enable env.rsync_delete option "
"if env.remote_output is not set!!!"))
print(blue("Exit"))
exit()
print(red("Warning: env.remote_output directory is not set!\n"
"This will cause some problems!!!"))
ans = raw_input(red("Do you want to continue? (y/N) "))
if ans != "y":
print(blue("Exit"))
exit()
project.rsync_project(
local_dir = env.local_output,
remote_dir = env.remote_output.rstrip("/") + "/",
delete = env.rsync_delete
)
def g():
local("simiki generate")
def p():
local("simiki preview")
def gp():
g()
p()
|
Fix serious problem using rsync
|
Fix serious problem using rsync
If env.remote_output not set, use rsync --delete option will
empty the whole remote system, this is a very serious problem!
|
Python
|
mit
|
tankywoo/simiki,tankywoo/simiki,9p0le/simiki,9p0le/simiki,9p0le/simiki,zhaochunqi/simiki,tankywoo/simiki,zhaochunqi/simiki,zhaochunqi/simiki
|
---
+++
@@ -5,8 +5,9 @@
import os
import os.path
+from sys import exit
from fabric.api import env, local, run
-from fabric.colors import blue
+from fabric.colors import blue, red
import fabric.contrib.project as project
# Remote host and username
@@ -21,6 +22,8 @@
# Remote path to deploy output
env.remote_output = ""
+# Other options
+env.rsync_delete = False
def update_simiki():
print(blue("Old Version: "))
@@ -30,10 +33,24 @@
run("simiki -V")
def deploy():
+ if not env.remote_output:
+ if env.rsync_delete:
+ print(red("You can't enable env.rsync_delete option "
+ "if env.remote_output is not set!!!"))
+ print(blue("Exit"))
+ exit()
+
+ print(red("Warning: env.remote_output directory is not set!\n"
+ "This will cause some problems!!!"))
+ ans = raw_input(red("Do you want to continue? (y/N) "))
+ if ans != "y":
+ print(blue("Exit"))
+ exit()
+
project.rsync_project(
local_dir = env.local_output,
remote_dir = env.remote_output.rstrip("/") + "/",
- delete =True
+ delete = env.rsync_delete
)
def g():
|
bcabd0e0766e1d8f93c86ac8102e71bec446ef20
|
ynr/apps/sopn_parsing/management/commands/sopn_tooling_write_baseline.py
|
ynr/apps/sopn_parsing/management/commands/sopn_tooling_write_baseline.py
|
import json
import os
from candidates.models import Ballot
from bulk_adding.models import RawPeople
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""This command uses the ballots endpoint to loop over each
ballot and store each sopn pdf (uploaded_file) locally"""
def add_arguments(self, parser):
parser.add_argument(
"--data",
action="store",
help="Dictionary of raw people to write as a baseline",
)
def handle(self, *args, **options):
json_data = options["data"] or {}
if not json_data:
for ballot in Ballot.objects.exclude(officialdocument__isnull=True):
raw_people = getattr(ballot, "rawpeople", [])
try:
raw_people = ballot.rawpeople.data
except RawPeople.DoesNotExist:
raw_people = []
json_data[ballot.ballot_paper_id] = raw_people
file_path = os.path.join(
os.getcwd(), "ynr/apps/sopn_parsing/tests/data/sopn_baseline.json"
)
with open(file_path, "w") as f:
f.write(json.dumps(json_data))
|
import json
import os
from django.db.models import Q
from candidates.models import Ballot
from bulk_adding.models import RawPeople
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Creates a JSON file to represent ballots that have an Officialdocument.
Only include ballots where:
- The source of the RawPeople is from parsing a PDF
- No RawPeople were created from the OfficialDocument. This is so that we
will know if we make make improvements that mean more RawPeople are parsed
from an OfficialDocument
"""
def add_arguments(self, parser):
parser.add_argument(
"--data",
action="store",
help="Dictionary of raw people to write as a baseline",
)
def handle(self, *args, **options):
json_data = options["data"] or {}
if not json_data:
qs = Ballot.objects.exclude(officialdocument__isnull=True).filter(
Q(rawpeople__source_type=RawPeople.SOURCE_PARSED_PDF)
| Q(rawpeople__isnull=True)
)
for ballot in qs:
raw_people = getattr(ballot, "rawpeople", [])
try:
raw_people = ballot.rawpeople.data
except RawPeople.DoesNotExist:
raw_people = []
json_data[ballot.ballot_paper_id] = raw_people
file_path = os.path.join(
os.getcwd(), "ynr/apps/sopn_parsing/tests/data/sopn_baseline.json"
)
with open(file_path, "w") as f:
f.write(json.dumps(json_data))
|
Update which objects are used to write baseline file
|
Update which objects are used to write baseline file
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
---
+++
@@ -1,5 +1,7 @@
import json
import os
+
+from django.db.models import Q
from candidates.models import Ballot
from bulk_adding.models import RawPeople
@@ -7,8 +9,14 @@
class Command(BaseCommand):
- """This command uses the ballots endpoint to loop over each
- ballot and store each sopn pdf (uploaded_file) locally"""
+ """
+ Creates a JSON file to represent ballots that have an Officialdocument.
+ Only include ballots where:
+ - The source of the RawPeople is from parsing a PDF
+ - No RawPeople were created from the OfficialDocument. This is so that we
+ will know if we make make improvements that mean more RawPeople are parsed
+ from an OfficialDocument
+ """
def add_arguments(self, parser):
parser.add_argument(
@@ -19,8 +27,13 @@
def handle(self, *args, **options):
json_data = options["data"] or {}
+
if not json_data:
- for ballot in Ballot.objects.exclude(officialdocument__isnull=True):
+ qs = Ballot.objects.exclude(officialdocument__isnull=True).filter(
+ Q(rawpeople__source_type=RawPeople.SOURCE_PARSED_PDF)
+ | Q(rawpeople__isnull=True)
+ )
+ for ballot in qs:
raw_people = getattr(ballot, "rawpeople", [])
try:
raw_people = ballot.rawpeople.data
|
92bbddc2901c5720042fcadbbceaa68642e7cf3d
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_tasks.py
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_tasks.py
|
import pytest
from celery.result import EagerResult
from {{ cookiecutter.project_slug }}.users.tasks import get_users_count
from {{ cookiecutter.project_slug }}.users.tests.factories import UserFactory
@pytest.mark.django_db
def test_user_count(settings):
"""A basic test to execute the get_users_count Celery task."""
UserFactory.create_batch(3)
settings.CELERY_TASK_ALWAYS_EAGER = True
task_result = get_users_count.delay()
assert isinstance(task_result, EagerResult)
assert task_result.result == 3
|
import pytest
from celery.result import EagerResult
from {{ cookiecutter.project_slug }}.users.tasks import get_users_count
from {{ cookiecutter.project_slug }}.users.tests.factories import UserFactory
pytestmark = pytest.mark.django_db
def test_user_count(settings):
"""A basic test to execute the get_users_count Celery task."""
UserFactory.create_batch(3)
settings.CELERY_TASK_ALWAYS_EAGER = True
task_result = get_users_count.delay()
assert isinstance(task_result, EagerResult)
assert task_result.result == 3
|
Change style for pytest marker
|
Change style for pytest marker
Update pytest.mark.django_db to be more consistent with rest of the project
|
Python
|
bsd-3-clause
|
luzfcb/cookiecutter-django,pydanny/cookiecutter-django,trungdong/cookiecutter-django,ryankanno/cookiecutter-django,luzfcb/cookiecutter-django,pydanny/cookiecutter-django,luzfcb/cookiecutter-django,trungdong/cookiecutter-django,luzfcb/cookiecutter-django,ryankanno/cookiecutter-django,trungdong/cookiecutter-django,pydanny/cookiecutter-django,trungdong/cookiecutter-django,ryankanno/cookiecutter-django,pydanny/cookiecutter-django,ryankanno/cookiecutter-django
|
---
+++
@@ -4,8 +4,9 @@
from {{ cookiecutter.project_slug }}.users.tasks import get_users_count
from {{ cookiecutter.project_slug }}.users.tests.factories import UserFactory
+pytestmark = pytest.mark.django_db
-@pytest.mark.django_db
+
def test_user_count(settings):
"""A basic test to execute the get_users_count Celery task."""
UserFactory.create_batch(3)
|
01551a1985064f319600732197afeb93cb64a377
|
locust/__init__.py
|
locust/__init__.py
|
from .user.sequential_taskset import SequentialTaskSet
from .user.task import task, TaskSet
from .user.users import HttpUser, User
from .user.wait_time import between, constant, constant_pacing
from .event import Events
events = Events()
__version__ = "1.0b1"
|
from .user.sequential_taskset import SequentialTaskSet
from .user import wait_time
from .user.task import task, TaskSet
from .user.users import HttpUser, User
from .user.wait_time import between, constant, constant_pacing
from .event import Events
events = Events()
__version__ = "1.0b1"
|
Add wait_time to locust python package namespace (to not break `from locust.wait_time import ...` imports)
|
Add wait_time to locust python package namespace (to not break `from locust.wait_time import ...` imports)
|
Python
|
mit
|
mbeacom/locust,locustio/locust,mbeacom/locust,locustio/locust,locustio/locust,mbeacom/locust,mbeacom/locust,locustio/locust
|
---
+++
@@ -1,4 +1,5 @@
from .user.sequential_taskset import SequentialTaskSet
+from .user import wait_time
from .user.task import task, TaskSet
from .user.users import HttpUser, User
from .user.wait_time import between, constant, constant_pacing
|
14462f42c8e187411e5875686ce8869bbd9b1fb7
|
spectator/core/imagegenerators.py
|
spectator/core/imagegenerators.py
|
from imagekit import ImageSpec, register
from imagekit.processors import ResizeToFit
from spectator.core import app_settings
class Thumbnail(ImageSpec):
"Base class"
format = "JPEG"
options = {"quality": 60}
class ListThumbnail(Thumbnail):
"For displaying in lists of Publications, Events, etc."
processors = [ResizeToFit(*app_settings.THUMBNAIL_LIST_SIZE)]
class ListThumbnail2x(ListThumbnail):
"""Retina version of ListThumbnail
Generated twice the size of our set dimensions.
"""
dimensions = [d * 2 for d in app_settings.THUMBNAIL_LIST_SIZE]
processors = [ResizeToFit(*dimensions)]
class DetailThumbnail(Thumbnail):
"For displaying on the detail pages of Publication, Event, etc"
processors = [ResizeToFit(*app_settings.THUMBNAIL_DETAIL_SIZE)]
class DetailThumbnail2x(DetailThumbnail):
"""Retina version of DetailThumbnail
Generated twice the size of our set dimensions.
"""
dimensions = [d * 2 for d in app_settings.THUMBNAIL_DETAIL_SIZE]
processors = [ResizeToFit(*dimensions)]
register.generator("spectator:list_thumbnail", ListThumbnail)
register.generator("spectator:list_thumbnail2x", ListThumbnail2x)
register.generator("spectator:detail_thumbnail", DetailThumbnail)
register.generator("spectator:detail_thumbnail2x", DetailThumbnail2x)
|
from imagekit import ImageSpec, register
from imagekit.processors import ResizeToFit
from spectator.core import app_settings
class Thumbnail(ImageSpec):
"Base class"
format = "JPEG"
options = {"quality": 80}
class ListThumbnail(Thumbnail):
"For displaying in lists of Publications, Events, etc."
processors = [ResizeToFit(*app_settings.THUMBNAIL_LIST_SIZE)]
class ListThumbnail2x(ListThumbnail):
"""Retina version of ListThumbnail
Generated twice the size of our set dimensions.
"""
dimensions = [d * 2 for d in app_settings.THUMBNAIL_LIST_SIZE]
processors = [ResizeToFit(*dimensions)]
class DetailThumbnail(Thumbnail):
"For displaying on the detail pages of Publication, Event, etc"
processors = [ResizeToFit(*app_settings.THUMBNAIL_DETAIL_SIZE)]
class DetailThumbnail2x(DetailThumbnail):
"""Retina version of DetailThumbnail
Generated twice the size of our set dimensions.
"""
dimensions = [d * 2 for d in app_settings.THUMBNAIL_DETAIL_SIZE]
processors = [ResizeToFit(*dimensions)]
register.generator("spectator:list_thumbnail", ListThumbnail)
register.generator("spectator:list_thumbnail2x", ListThumbnail2x)
register.generator("spectator:detail_thumbnail", DetailThumbnail)
register.generator("spectator:detail_thumbnail2x", DetailThumbnail2x)
|
Increase thumbnail quality to 80 from 60
|
Increase thumbnail quality to 80 from 60
Some of them were looking pretty compressed.
|
Python
|
mit
|
philgyford/django-spectator,philgyford/django-spectator,philgyford/django-spectator
|
---
+++
@@ -7,7 +7,7 @@
class Thumbnail(ImageSpec):
"Base class"
format = "JPEG"
- options = {"quality": 60}
+ options = {"quality": 80}
class ListThumbnail(Thumbnail):
|
48be00323c57b6a61455d2591970e1380da8055c
|
libcloud/dns/providers.py
|
libcloud/dns/providers.py
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from libcloud.utils.misc import get_driver as get_provider_driver
from libcloud.dns.types import Provider
DRIVERS = {
Provider.DUMMY:
('libcloud.dns.drivers.dummy', 'DummyDNSDriver'),
Provider.LINODE:
('libcloud.dns.drivers.linode', 'LinodeDNSDriver'),
Provider.ZERIGO:
('libcloud.dns.drivers.zerigo', 'ZerigoDNSDriver'),
Provider.RACKSPACE_US:
('libcloud.dns.drivers.rackspace', 'RackspaceUSDNSDriver'),
Provider.RACKSPACE_UK:
('libcloud.dns.drivers.rackspace', 'RackspaceUKDNSDriver'),
Provider.ROUTE53:
('libcloud.dns.drivers.route53', 'Route53DNSDriver')
}
def get_driver(provider):
return get_provider_driver(DRIVERS, provider)
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from libcloud.utils.misc import get_driver as get_provider_driver
from libcloud.dns.types import Provider
DRIVERS = {
Provider.DUMMY:
('libcloud.dns.drivers.dummy', 'DummyDNSDriver'),
Provider.LINODE:
('libcloud.dns.drivers.linode', 'LinodeDNSDriver'),
Provider.ZERIGO:
('libcloud.dns.drivers.zerigo', 'ZerigoDNSDriver'),
Provider.RACKSPACE_US:
('libcloud.dns.drivers.rackspace', 'RackspaceUSDNSDriver'),
Provider.RACKSPACE_UK:
('libcloud.dns.drivers.rackspace', 'RackspaceUKDNSDriver')
}
def get_driver(provider):
return get_provider_driver(DRIVERS, provider)
|
Remove route54 provider because the driver is not finished yet.
|
Remove route54 provider because the driver is not finished yet.
git-svn-id: 9ad005ce451fa0ce30ad6352b03eb45b36893355@1340889 13f79535-47bb-0310-9956-ffa450edef68
|
Python
|
apache-2.0
|
sergiorua/libcloud,Cloud-Elasticity-Services/as-libcloud,Cloud-Elasticity-Services/as-libcloud,mbrukman/libcloud,iPlantCollaborativeOpenSource/libcloud,sfriesel/libcloud,ByteInternet/libcloud,curoverse/libcloud,cryptickp/libcloud,schaubl/libcloud,t-tran/libcloud,carletes/libcloud,MrBasset/libcloud,ninefold/libcloud,jimbobhickville/libcloud,briancurtin/libcloud,NexusIS/libcloud,sahildua2305/libcloud,thesquelched/libcloud,kater169/libcloud,munkiat/libcloud,curoverse/libcloud,aviweit/libcloud,iPlantCollaborativeOpenSource/libcloud,mistio/libcloud,pquentin/libcloud,sfriesel/libcloud,StackPointCloud/libcloud,sahildua2305/libcloud,t-tran/libcloud,schaubl/libcloud,mtekel/libcloud,t-tran/libcloud,pantheon-systems/libcloud,MrBasset/libcloud,vongazman/libcloud,atsaki/libcloud,SecurityCompass/libcloud,sergiorua/libcloud,apache/libcloud,watermelo/libcloud,Cloud-Elasticity-Services/as-libcloud,smaffulli/libcloud,thesquelched/libcloud,dcorbacho/libcloud,cloudControl/libcloud,lochiiconnectivity/libcloud,illfelder/libcloud,NexusIS/libcloud,marcinzaremba/libcloud,aleGpereira/libcloud,Kami/libcloud,JamesGuthrie/libcloud,carletes/libcloud,watermelo/libcloud,marcinzaremba/libcloud,JamesGuthrie/libcloud,cloudControl/libcloud,Jc2k/libcloud,andrewsomething/libcloud,jerryblakley/libcloud,samuelchong/libcloud,erjohnso/libcloud,sahildua2305/libcloud,munkiat/libcloud,StackPointCloud/libcloud,kater169/libcloud,atsaki/libcloud,Scalr/libcloud,Kami/libcloud,sfriesel/libcloud,Scalr/libcloud,JamesGuthrie/libcloud,jerryblakley/libcloud,wrigri/libcloud,niteoweb/libcloud,mgogoulos/libcloud,carletes/libcloud,supertom/libcloud,thesquelched/libcloud,sgammon/libcloud,ZuluPro/libcloud,cloudControl/libcloud,DimensionDataCBUSydney/libcloud,mgogoulos/libcloud,lochiiconnectivity/libcloud,vongazman/libcloud,mbrukman/libcloud,ClusterHQ/libcloud,cryptickp/libcloud,pantheon-systems/libcloud,mathspace/libcloud,jimbobhickville/libcloud,briancurtin/libcloud,samuelchong/libcloud,mistio/libcloud,ZuluPro/libcloud,curove
rse/libcloud,samuelchong/libcloud,mathspace/libcloud,techhat/libcloud,atsaki/libcloud,aleGpereira/libcloud,aleGpereira/libcloud,apache/libcloud,SecurityCompass/libcloud,watermelo/libcloud,marcinzaremba/libcloud,pquentin/libcloud,Itxaka/libcloud,wuyuewen/libcloud,erjohnso/libcloud,ZuluPro/libcloud,ninefold/libcloud,Verizon/libcloud,techhat/libcloud,supertom/libcloud,Jc2k/libcloud,StackPointCloud/libcloud,aviweit/libcloud,apache/libcloud,supertom/libcloud,andrewsomething/libcloud,cryptickp/libcloud,mbrukman/libcloud,jimbobhickville/libcloud,munkiat/libcloud,dcorbacho/libcloud,sergiorua/libcloud,andrewsomething/libcloud,lochiiconnectivity/libcloud,ByteInternet/libcloud,erjohnso/libcloud,mathspace/libcloud,kater169/libcloud,mtekel/libcloud,DimensionDataCBUSydney/libcloud,Verizon/libcloud,Itxaka/libcloud,vongazman/libcloud,wido/libcloud,Kami/libcloud,DimensionDataCBUSydney/libcloud,briancurtin/libcloud,pquentin/libcloud,ByteInternet/libcloud,illfelder/libcloud,wrigri/libcloud,illfelder/libcloud,NexusIS/libcloud,Itxaka/libcloud,MrBasset/libcloud,schaubl/libcloud,smaffulli/libcloud,niteoweb/libcloud,sgammon/libcloud,aviweit/libcloud,wuyuewen/libcloud,niteoweb/libcloud,Scalr/libcloud,techhat/libcloud,ClusterHQ/libcloud,iPlantCollaborativeOpenSource/libcloud,smaffulli/libcloud,mtekel/libcloud,SecurityCompass/libcloud,wido/libcloud,Verizon/libcloud,jerryblakley/libcloud,dcorbacho/libcloud,mgogoulos/libcloud,wrigri/libcloud,wuyuewen/libcloud,wido/libcloud,mistio/libcloud,pantheon-systems/libcloud
|
---
+++
@@ -26,9 +26,7 @@
Provider.RACKSPACE_US:
('libcloud.dns.drivers.rackspace', 'RackspaceUSDNSDriver'),
Provider.RACKSPACE_UK:
- ('libcloud.dns.drivers.rackspace', 'RackspaceUKDNSDriver'),
- Provider.ROUTE53:
- ('libcloud.dns.drivers.route53', 'Route53DNSDriver')
+ ('libcloud.dns.drivers.rackspace', 'RackspaceUKDNSDriver')
}
|
bd8fdf1dccc3660be3b8e020a637f94d21dc2b3a
|
gaphas/tests/test_state.py
|
gaphas/tests/test_state.py
|
from builtins import object
import unittest
from gaphas.state import reversible_pair, observed, _reverse
class SList(object):
def __init__(self):
self.l = list()
def add(self, node, before=None):
if before: self.l.insert(self.l.index(before), node)
else: self.l.append(node)
add = observed(add)
@observed
def remove(self, node):
self.l.remove(self.l.index(node))
class StateTestCase(unittest.TestCase):
def test_adding_pair(self):
"""Test adding reversible pair
"""
reversible_pair(SList.add, SList.remove, \
bind1={'before': lambda self, node: self.l[self.l.index(node)+1] })
self.assertTrue(SList.add.__func__ in _reverse)
self.assertTrue(SList.remove.__func__ in _reverse)
|
from builtins import object
import unittest
from gaphas.state import reversible_pair, observed, _reverse
class SList(object):
def __init__(self):
self.l = list()
def add(self, node, before=None):
if before: self.l.insert(self.l.index(before), node)
else: self.l.append(node)
add = observed(add)
@observed
def remove(self, node):
self.l.remove(self.l.index(node))
class StateTestCase(unittest.TestCase):
def test_adding_pair(self):
"""Test adding reversible pair
"""
reversible_pair(SList.add, SList.remove, \
bind1={'before': lambda self, node: self.l[self.l.index(node)+1] })
self.assertTrue(SList.add in _reverse)
self.assertTrue(SList.remove in _reverse)
|
Fix function object has no attribute __func__
|
Fix function object has no attribute __func__
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
|
Python
|
lgpl-2.1
|
amolenaar/gaphas
|
---
+++
@@ -21,5 +21,5 @@
reversible_pair(SList.add, SList.remove, \
bind1={'before': lambda self, node: self.l[self.l.index(node)+1] })
- self.assertTrue(SList.add.__func__ in _reverse)
- self.assertTrue(SList.remove.__func__ in _reverse)
+ self.assertTrue(SList.add in _reverse)
+ self.assertTrue(SList.remove in _reverse)
|
7cad00f02c2f86cbeb23755f47f9bf1d45be2f8e
|
logperfu.py
|
logperfu.py
|
import threading
import fresenius
import readline
import time
import sys
prompt = '> '
port = sys.argv[1]
fresbase = fresenius.FreseniusComm(port = port)
logfile = open('seringues.log', 'w')
def printrx():
while not logfile.closed:
origin, msg = fresbase.recvq.get()
logfile.write("{};{}\n".format(time.time(), msg))
print msg + "\n" + prompt,
def queryloop():
Ts = 0.5 # 2 Hz sample rate
tbase = time.time()
while not logfile.closed:
tnew = time.time()
if tnew - tbase > Ts:
# d = flow rate, g = infused volume
fresbase.sendCommand('1LE;dg')
tbase = tnew
printthread = threading.Thread(target = printrx)
printthread.daemon = True
printthread.start()
querythread = threading.Thread(target = queryloop)
querythread.daemon = True
if False:
# Start querying for the flow
fresbase.sendCommand('0DC')
fresbase.sendCommand('1DC')
fresbase.sendCommand('1DE;dg')
querythread.start()
while True:
cmd = raw_input(prompt)
if cmd == 'quit': break
fresbase.sendCommand(cmd)
logfile.close()
fresbase.stop()
|
import threading
import fresenius
import readline
import time
import sys
from decimal import Decimal
prompt = '> '
port = sys.argv[1]
fresbase = fresenius.FreseniusComm(port = port)
logfile = open('seringues.log', 'w')
def printrx():
while not logfile.closed:
origin, msg = fresbase.recvq.get()
logfile.write("{};{}\n".format(Decimal(time.time()), msg))
print msg + "\n" + prompt,
def queryloop():
Ts = 0.5 # 2 Hz sample rate
tbase = time.time()
while not logfile.closed:
tnew = time.time()
if tnew - tbase > Ts:
# d = flow rate, g = infused volume
fresbase.sendCommand('1LE;dg')
tbase = tnew
printthread = threading.Thread(target = printrx)
printthread.daemon = True
printthread.start()
querythread = threading.Thread(target = queryloop)
querythread.daemon = True
if False:
# Start querying for the flow
fresbase.sendCommand('0DC')
fresbase.sendCommand('1DC')
fresbase.sendCommand('1DE;dg')
querythread.start()
while True:
cmd = raw_input(prompt)
if cmd == 'quit': break
fresbase.sendCommand(cmd)
logfile.close()
fresbase.stop()
|
Use decimal in the logger to get the most accurate time info
|
Use decimal in the logger to get the most accurate time info
|
Python
|
isc
|
jaj42/infupy
|
---
+++
@@ -3,6 +3,7 @@
import readline
import time
import sys
+from decimal import Decimal
prompt = '> '
@@ -14,7 +15,7 @@
def printrx():
while not logfile.closed:
origin, msg = fresbase.recvq.get()
- logfile.write("{};{}\n".format(time.time(), msg))
+ logfile.write("{};{}\n".format(Decimal(time.time()), msg))
print msg + "\n" + prompt,
def queryloop():
|
b66fc000a2d1328f557fb2c563f99b4748bf88b1
|
py101/boilerplate/__init__.py
|
py101/boilerplate/__init__.py
|
""""
Boilerplate Adventure
Author: Ignacio Avas (iavas@sophilabs.com)
"""
import codecs
import io
import sys
import unittest
from story.adventures import AdventureVerificationError, BaseAdventure
from story.translation import gettext as _
class TestOutput(unittest.TestCase):
"""Variables Adventure test"""
def __init__(self, candidate_code, file_name='<inline>'):
"""Init the test"""
super(TestOutput, self).__init__()
self.candidate_code = candidate_code
self.file_name = file_name
def setUp(self):
self.__old_stdout = sys.stdout
sys.stdout = self.__mockstdout = io.StringIO()
def tearDown(self):
sys.stdout = self.__old_stdout
self.__mockstdout.close()
def runTest(self):
"""Makes a simple test of the output"""
code = compile(self.candidate_code, self.file_name, 'exec', optimize=0)
exec(code)
class Adventure(BaseAdventure):
"""Boilerplate Adventure"""
title = '<Insert Title Here>'
@classmethod
def test(cls, sourcefile):
"""Test against the provided file"""
suite = unittest.TestSuite()
raw_program = codecs.open(sourcefile).read()
suite.addTest(TestOutput(raw_program, sourcefile))
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
raise AdventureVerificationError()
|
""""
Boilerplate Adventure
Author: Ignacio Avas (iavas@sophilabs.com)
"""
import codecs
import io
import sys
import unittest
from story.adventures import AdventureVerificationError, BaseAdventure
from story.translation import gettext as _
class TestOutput(unittest.TestCase):
"""Variables Adventure test"""
def __init__(self, candidate_code, file_name='<inline>'):
"""Init the test"""
super(TestOutput, self).__init__()
self.candidate_code = candidate_code
self.file_name = file_name
def setUp(self):
self.__old_stdout = sys.stdout
sys.stdout = self.__mockstdout = io.StringIO()
def tearDown(self):
sys.stdout = self.__old_stdout
self.__mockstdout.close()
def runTest(self):
"""Makes a simple test of the output"""
#code = compile(self.candidate_code, self.file_name, 'exec', optimize=0)
#exec(code)
self.fail("Test not implemented")
class Adventure(BaseAdventure):
"""Boilerplate Adventure"""
title = '<Insert Title Here>'
@classmethod
def test(cls, sourcefile):
"""Test against the provided file"""
suite = unittest.TestSuite()
raw_program = codecs.open(sourcefile).read()
suite.addTest(TestOutput(raw_program, sourcefile))
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
raise AdventureVerificationError()
|
Comment out some boilerplate code, to prevent failures
|
Comment out some boilerplate code, to prevent failures
|
Python
|
mit
|
sophilabs/py101
|
---
+++
@@ -30,8 +30,9 @@
def runTest(self):
"""Makes a simple test of the output"""
- code = compile(self.candidate_code, self.file_name, 'exec', optimize=0)
- exec(code)
+ #code = compile(self.candidate_code, self.file_name, 'exec', optimize=0)
+ #exec(code)
+ self.fail("Test not implemented")
class Adventure(BaseAdventure):
|
b2ebc5878f7113756970ef52ae8e0db4cc6e5cf2
|
marbaloo_mako/__init__.py
|
marbaloo_mako/__init__.py
|
import cherrypy
from mako.lookup import TemplateLookup
class Tool(cherrypy.Tool):
_lookups = {}
def __init__(self):
cherrypy.Tool.__init__(self, 'before_handler',
self.callable,
priority=20)
def callable(self,
filename=None,
directories=None,
module_directory=None,
collection_size=-1):
if filename is None or directories is None:
return
# Find the appropriate template lookup.
key = (tuple(directories), module_directory)
try:
lookup = self._lookups[key]
except KeyError:
lookup = TemplateLookup(directories=directories,
module_directory=module_directory,
collection_size=collection_size,
input_encoding='utf8')
self._lookups[key] = lookup
cherrypy.request.lookup = lookup
# Replace the current handler.
cherrypy.request.template = template = lookup.get_template(filename)
inner_handler = cherrypy.serving.request.handler
def wrapper(*args, **kwargs):
context = inner_handler(*args, **kwargs)
response = template.render(**context)
return response
cherrypy.serving.request.handler = wrapper
|
import cherrypy
from mako.lookup import TemplateLookup
class Tool(cherrypy.Tool):
_lookups = {}
def __init__(self):
cherrypy.Tool.__init__(self, 'before_handler',
self.callable,
priority=40)
def callable(self,
filename=None,
directories=None,
module_directory=None,
collection_size=-1):
if filename is None or directories is None:
return
# Find the appropriate template lookup.
key = (tuple(directories), module_directory)
try:
lookup = self._lookups[key]
except KeyError:
lookup = TemplateLookup(directories=directories,
module_directory=module_directory,
collection_size=collection_size,
input_encoding='utf8')
self._lookups[key] = lookup
cherrypy.request.lookup = lookup
# Replace the current handler.
cherrypy.request.template = template = lookup.get_template(filename)
inner_handler = cherrypy.serving.request.handler
def wrapper(*args, **kwargs):
context = inner_handler(*args, **kwargs)
response = template.render(**context)
return response
cherrypy.serving.request.handler = wrapper
|
Increase tool priority, to keep space between cherrypy builtin tools like `json_out`.
|
Increase tool priority, to keep space between cherrypy builtin tools like `json_out`.
|
Python
|
mit
|
marbaloo/marbaloo_mako
|
---
+++
@@ -8,7 +8,7 @@
def __init__(self):
cherrypy.Tool.__init__(self, 'before_handler',
self.callable,
- priority=20)
+ priority=40)
def callable(self,
filename=None,
|
f3299b3108c8fab7015541e9d9b1ef220a488f87
|
onitu/escalator/server/__main__.py
|
onitu/escalator/server/__main__.py
|
import zmq
from .databases import Databases
from .worker import Worker
context = zmq.Context()
back_uri = 'inproc://workers'
proxy = zmq.devices.ThreadDevice(
device_type=zmq.QUEUE, in_type=zmq.DEALER, out_type=zmq.ROUTER
)
proxy.bind_out('tcp://*:4224')
proxy.bind_in(back_uri)
proxy.start()
databases = Databases('dbs')
nb_workers = 8
workers = []
for i in range(nb_workers):
worker = Worker(databases, back_uri)
worker.daemon = True
worker.start()
workers.append(worker)
while proxy.launcher.isAlive():
try:
# If we join the process without a timeout we never
# get the chance to handle the exception
proxy.join(100)
except KeyboardInterrupt:
databases.close()
break
|
import argparse
import zmq
from .databases import Databases
from .worker import Worker
parser = argparse.ArgumentParser("escalator")
parser.add_argument(
'--bind', default='tcp://*:4224',
help="Address to bind escalator server"
)
args = parser.parse_args()
context = zmq.Context()
back_uri = 'inproc://workers'
proxy = zmq.devices.ThreadDevice(
device_type=zmq.QUEUE, in_type=zmq.DEALER, out_type=zmq.ROUTER
)
proxy.bind_out(args.bind)
proxy.bind_in(back_uri)
proxy.start()
print('Starting escalator server on {}'.format(repr(args.bind)))
databases = Databases('dbs')
nb_workers = 8
workers = []
for i in range(nb_workers):
worker = Worker(databases, back_uri)
worker.daemon = True
worker.start()
workers.append(worker)
while proxy.launcher.isAlive():
try:
# If we join the process without a timeout we never
# get the chance to handle the exception
proxy.join(100)
except KeyboardInterrupt:
databases.close()
break
|
Use command-line argument to bind server address
|
Use command-line argument to bind server address
|
Python
|
mit
|
onitu/onitu,onitu/onitu,onitu/onitu
|
---
+++
@@ -1,7 +1,17 @@
+import argparse
+
import zmq
from .databases import Databases
from .worker import Worker
+
+
+parser = argparse.ArgumentParser("escalator")
+parser.add_argument(
+ '--bind', default='tcp://*:4224',
+ help="Address to bind escalator server"
+)
+args = parser.parse_args()
context = zmq.Context()
@@ -10,9 +20,11 @@
proxy = zmq.devices.ThreadDevice(
device_type=zmq.QUEUE, in_type=zmq.DEALER, out_type=zmq.ROUTER
)
-proxy.bind_out('tcp://*:4224')
+proxy.bind_out(args.bind)
proxy.bind_in(back_uri)
proxy.start()
+
+print('Starting escalator server on {}'.format(repr(args.bind)))
databases = Databases('dbs')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.