commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
958a134bd5e68ac94e3f4f25d4587e357d9a7ba5
|
download_data.py
|
download_data.py
|
# coding: utf-8
"""
Script to download the raw data from http://www.rsssf.com/
The data was processed mostly by interactive sessions in ipython. Almost every
file had it's own format, so there is no point in trying to automate it in a
fully automatic script, but this downloading script may be useful for future
dowloads.
"""
import requests
YEAR_URL = 'http://www.rsssf.com/tablesa/arg%i.html'
FILE_PATH = 'data/%i.txt'
YEARS = range(90, 100) + range(2000, 2016)
for year in YEARS:
print 'Year:', year
try:
data = requests.get(YEAR_URL % year).content
with open(FILE_PATH % year, 'w') as data_file:
data_file.write(data)
print 'Wrote file with', len(data), 'chars'
except:
print 'ERROR!'
|
# coding: utf-8
"""
Script to download the raw data from http://www.rsssf.com/
The data was processed mostly by interactive sessions in ipython. Almost every
file had it's own format, so there is no point in trying to automate it in a
fully automatic script, but this downloading script may be useful for future
dowloads.
"""
import nltk
import requests
YEAR_URL = 'http://www.rsssf.com/tablesa/arg%i.html'
FILE_PATH = 'data/%i.txt'
YEARS = range(90, 100) + range(2000, 2016)
for year in YEARS:
print 'Year:', year
try:
html = requests.get(YEAR_URL % year).content
text = nltk.clean_html(html)
with open(FILE_PATH % year, 'w') as data_file:
data_file.write(text)
print 'Wrote file with', len(text), 'chars'
except:
print 'ERROR!'
|
Use nltk to extract text from html data
|
Use nltk to extract text from html data
|
Python
|
mit
|
fisadev/afa_cup_learning
|
---
+++
@@ -6,6 +6,7 @@
fully automatic script, but this downloading script may be useful for future
dowloads.
"""
+import nltk
import requests
YEAR_URL = 'http://www.rsssf.com/tablesa/arg%i.html'
@@ -15,9 +16,10 @@
for year in YEARS:
print 'Year:', year
try:
- data = requests.get(YEAR_URL % year).content
+ html = requests.get(YEAR_URL % year).content
+ text = nltk.clean_html(html)
with open(FILE_PATH % year, 'w') as data_file:
- data_file.write(data)
- print 'Wrote file with', len(data), 'chars'
+ data_file.write(text)
+ print 'Wrote file with', len(text), 'chars'
except:
print 'ERROR!'
|
4212b19910e627b48df2c50389b5c8e46250c568
|
girder_worker/__init__.py
|
girder_worker/__init__.py
|
import abc
import os
from pkg_resources import DistributionNotFound, get_distribution
from six.moves.configparser import SafeConfigParser
from . import log_utils
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
__license__ = 'Apache 2.0'
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
# Read the configuration files
_cfgs = ('worker.dist.cfg', 'worker.local.cfg')
config = SafeConfigParser({
'RABBITMQ_USER': os.environ.get('RABBITMQ_USER', 'guest'),
'RABBITMQ_PASS': os.environ.get('RABBITMQ_PASS', 'guest'),
'RABBITMQ_HOST': os.environ.get('RABBITMQ_HOST', 'localhost')
})
config.read([os.path.join(PACKAGE_DIR, f) for f in _cfgs])
# Create and configure our logger
logger = log_utils.setupLogger(config)
class GirderWorkerPluginABC(object):
""" """
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __init__(self, app, *args, **kwargs):
""" """
@abc.abstractmethod
def task_imports(self):
""" """
class GirderWorkerPlugin(GirderWorkerPluginABC):
def __init__(self, app, *args, **kwargs):
self.app = app
def task_imports(self):
return ['girder_worker.tasks']
|
import abc
import os
from pkg_resources import DistributionNotFound, get_distribution
from six.moves.configparser import SafeConfigParser, add_metaclass
from . import log_utils
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
pass
__license__ = 'Apache 2.0'
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
# Read the configuration files
_cfgs = ('worker.dist.cfg', 'worker.local.cfg')
config = SafeConfigParser({
'RABBITMQ_USER': os.environ.get('RABBITMQ_USER', 'guest'),
'RABBITMQ_PASS': os.environ.get('RABBITMQ_PASS', 'guest'),
'RABBITMQ_HOST': os.environ.get('RABBITMQ_HOST', 'localhost')
})
config.read([os.path.join(PACKAGE_DIR, f) for f in _cfgs])
# Create and configure our logger
logger = log_utils.setupLogger(config)
@add_metaclass(abc.ABCMeta)
class GirderWorkerPluginABC(object):
""" """
@abc.abstractmethod
def __init__(self, app, *args, **kwargs):
""" """
@abc.abstractmethod
def task_imports(self):
""" """
class GirderWorkerPlugin(GirderWorkerPluginABC):
def __init__(self, app, *args, **kwargs):
self.app = app
def task_imports(self):
return ['girder_worker.tasks']
|
Use six to add meta class
|
Use six to add meta class
|
Python
|
apache-2.0
|
girder/girder_worker,girder/girder_worker,girder/girder_worker
|
---
+++
@@ -1,7 +1,7 @@
import abc
import os
from pkg_resources import DistributionNotFound, get_distribution
-from six.moves.configparser import SafeConfigParser
+from six.moves.configparser import SafeConfigParser, add_metaclass
from . import log_utils
@@ -32,9 +32,9 @@
logger = log_utils.setupLogger(config)
+@add_metaclass(abc.ABCMeta)
class GirderWorkerPluginABC(object):
""" """
- __metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __init__(self, app, *args, **kwargs):
|
7638f03dfe347866d62ede55d0163e7593f9e6c9
|
waterbutler/core/__init__.py
|
waterbutler/core/__init__.py
|
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import make_provider
__all__ = [
'async_retry',
'make_provider',
]
|
# from waterbutler.core.utils import async_retry
# from waterbutler.core.utils import make_provider
# __all__ = [
# 'async_retry',
# 'make_provider',
# ]
|
Remove __all__ as it was not used and causing circular import errors when attempting to configure logging
|
Remove __all__ as it was not used and causing circular import errors when attempting to configure logging
|
Python
|
apache-2.0
|
TomBaxter/waterbutler,Johnetordoff/waterbutler,RCOSDP/waterbutler,kwierman/waterbutler,CenterForOpenScience/waterbutler,rdhyee/waterbutler,Ghalko/waterbutler,felliott/waterbutler,rafaeldelucena/waterbutler,hmoco/waterbutler,cosenal/waterbutler,icereval/waterbutler,chrisseto/waterbutler
|
---
+++
@@ -1,7 +1,7 @@
-from waterbutler.core.utils import async_retry
-from waterbutler.core.utils import make_provider
+# from waterbutler.core.utils import async_retry
+# from waterbutler.core.utils import make_provider
-__all__ = [
- 'async_retry',
- 'make_provider',
-]
+# __all__ = [
+# 'async_retry',
+# 'make_provider',
+# ]
|
727e2283f60355b7c76e4ef0d72ee666eb269bd4
|
facturapdf/__about__.py
|
facturapdf/__about__.py
|
__title__ = 'facturapdf'
__summary__ = 'Create PDF invoice according to Spanish regulations.'
__version__ = '0.0.1'
__license__ = 'BSD 3-Clause License'
__uri__ = 'https://github.com/initios/factura-pdf'
__author__ = 'Carlos Goce'
__email__ = 'cgonzalez@initios.com'
|
__title__ = 'facturapdf'
__summary__ = 'Create PDF invoice according to Spanish regulations.'
__version__ = '0.0.2'
__license__ = 'BSD 3-Clause License'
__uri__ = 'https://github.com/initios/factura-pdf'
__author__ = 'Carlos Goce'
__email__ = 'cgonzalez@initios.com'
|
Update about version. It was outdated
|
Update about version. It was outdated
|
Python
|
bsd-3-clause
|
initios/factura-pdf
|
---
+++
@@ -1,6 +1,6 @@
__title__ = 'facturapdf'
__summary__ = 'Create PDF invoice according to Spanish regulations.'
-__version__ = '0.0.1'
+__version__ = '0.0.2'
__license__ = 'BSD 3-Clause License'
__uri__ = 'https://github.com/initios/factura-pdf'
|
4fe675af1cc8eb65f843e06962763dab8c920ce5
|
contrib/meson/GetLz4LibraryVersion.py
|
contrib/meson/GetLz4LibraryVersion.py
|
#!/usr/bin/env python3
# #############################################################################
# Copyright (c) 2018-present lzutao <taolzu(at)gmail.com>
# All rights reserved.
#
# This source code is licensed under both the BSD-style license (found in the
# LICENSE file in the root directory of this source tree) and the GPLv2 (found
# in the COPYING file in the root directory of this source tree).
# #############################################################################
import re
import sys
def usage():
print('usage: python3 GetLz4LibraryVersion.py <path/to/lz4.h>')
sys.exit(1)
def find_version(filepath):
version_file_data = None
with open(filepath) as fd:
version_file_data = fd.read()
patterns = r"""#\s*define\s+LZ4_VERSION_MAJOR\s+([0-9]+).*$
#\s*define\s+LZ4_VERSION_MINOR\s+([0-9]+).*$
#\s*define\s+LZ4_VERSION_RELEASE\s+([0-9]+).*$
"""
regex = re.compile(patterns, re.MULTILINE)
version_match = regex.search(version_file_data)
if version_match:
return version_match.groups()
raise Exception("Unable to find version string.")
def main():
if len(sys.argv) < 2:
usage()
filepath = sys.argv[1]
version_tup = find_version(filepath)
print('.'.join(version_tup))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
# #############################################################################
# Copyright (c) 2018-present lzutao <taolzu(at)gmail.com>
# All rights reserved.
#
# This source code is licensed under both the BSD-style license (found in the
# LICENSE file in the root directory of this source tree) and the GPLv2 (found
# in the COPYING file in the root directory of this source tree).
# #############################################################################
import re
import sys
def find_version(filepath):
version_file_data = None
with open(filepath) as fd:
version_file_data = fd.read()
patterns = r"""#\s*define\s+LZ4_VERSION_MAJOR\s+([0-9]+).*$
#\s*define\s+LZ4_VERSION_MINOR\s+([0-9]+).*$
#\s*define\s+LZ4_VERSION_RELEASE\s+([0-9]+).*$
"""
regex = re.compile(patterns, re.MULTILINE)
version_match = regex.search(version_file_data)
if version_match:
return version_match.groups()
raise Exception("Unable to find version string.")
def main():
import argparse
parser = argparse.ArgumentParser(description='Print lz4 version from lib/lz4.h')
parser.add_argument('file', help='path to lib/lz4.h')
args = parser.parse_args()
filepath = args.file
version_tup = find_version(filepath)
print('.'.join(version_tup))
if __name__ == '__main__':
main()
|
Use argparse instead of manually parsing
|
Use argparse instead of manually parsing [skip ci]
|
Python
|
isc
|
unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso,unknownbrackets/maxcso
|
---
+++
@@ -9,11 +9,6 @@
# #############################################################################
import re
import sys
-
-
-def usage():
- print('usage: python3 GetLz4LibraryVersion.py <path/to/lz4.h>')
- sys.exit(1)
def find_version(filepath):
@@ -33,10 +28,11 @@
def main():
- if len(sys.argv) < 2:
- usage()
-
- filepath = sys.argv[1]
+ import argparse
+ parser = argparse.ArgumentParser(description='Print lz4 version from lib/lz4.h')
+ parser.add_argument('file', help='path to lib/lz4.h')
+ args = parser.parse_args()
+ filepath = args.file
version_tup = find_version(filepath)
print('.'.join(version_tup))
|
b03b62e7abe9a8db0cded78b80cb8d565a424a7e
|
apps/activity/models.py
|
apps/activity/models.py
|
from django.db import models
class Activity(models.Model):
entry = models.ForeignKey('feeds.Entry', blank=True, null=True,
unique=True)
published_on = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return u'%s:%s => %s' % (self.source_class, self.source_id, self.title)
def from_feed(self):
return self.entry != None
def broadcast(source):
return Activity.objects.create(
entry=source
)
|
from django.db import models
class Activity(models.Model):
entry = models.ForeignKey('feeds.Entry', blank=True, null=True,
unique=True)
published_on = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return u'%d: Entry: %s' % (self.pk, self.entry)
def broadcast(source):
return Activity.objects.create(
entry=source
)
|
Remove reference to old field and unused method
|
Remove reference to old field and unused method
|
Python
|
bsd-3-clause
|
mozilla/betafarm,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/betafarm,mozilla/betafarm,mozilla/betafarm,mozilla/mozilla-ignite
|
---
+++
@@ -7,10 +7,7 @@
published_on = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
- return u'%s:%s => %s' % (self.source_class, self.source_id, self.title)
-
- def from_feed(self):
- return self.entry != None
+ return u'%d: Entry: %s' % (self.pk, self.entry)
def broadcast(source):
|
3504bd0c867841c85f5ef54cdb5096ec5117cc1e
|
backend/scripts/projsize.py
|
backend/scripts/projsize.py
|
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
def compute_project_size(project_id, conn):
total = 0
for f in r.table('project2datafile').get_all(project_id, index="project_id").eq_join('datafile_id', r.table(
'datafiles')).zip().run(conn):
total = total + f['size']
print "Total size %s" % sizeof_fmt(total)
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int", help="rethinkdb port", default=30815)
parser.add_option("-p", "--project-id", dest="project_id", type="string", help="project id")
(options, args) = parser.parse_args()
conn = r.connect('localhost', options.port, db="materialscommons")
compute_project_size(options.project_id, conn)
|
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
def compute_project_size(project_id, conn):
total = 0
count = 0
for f in r.table('project2datafile').get_all(project_id, index="project_id").eq_join('datafile_id', r.table(
'datafiles')).zip().run(conn):
total = total + f['size']
count = count + 1
print "Total files : %s" % "{:,}".format(count)
print "Project size: %s" % sizeof_fmt(total)
def sizeof_fmt(num, suffix='B'):
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-P", "--port", dest="port", type="int", help="rethinkdb port", default=30815)
parser.add_option("-p", "--project-id", dest="project_id", type="string", help="project id")
(options, args) = parser.parse_args()
conn = r.connect('localhost', options.port, db="materialscommons")
compute_project_size(options.project_id, conn)
|
Improve report format and information
|
Improve report format and information
Include total number of files.
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
---
+++
@@ -6,10 +6,13 @@
def compute_project_size(project_id, conn):
total = 0
+ count = 0
for f in r.table('project2datafile').get_all(project_id, index="project_id").eq_join('datafile_id', r.table(
'datafiles')).zip().run(conn):
total = total + f['size']
- print "Total size %s" % sizeof_fmt(total)
+ count = count + 1
+ print "Total files : %s" % "{:,}".format(count)
+ print "Project size: %s" % sizeof_fmt(total)
def sizeof_fmt(num, suffix='B'):
|
06effd31b4f52f1f61114c03ba8b1d6ce51738ab
|
zou/app/utils/permissions.py
|
zou/app/utils/permissions.py
|
from flask_principal import RoleNeed, Permission
from werkzeug.exceptions import Forbidden
admin_permission = Permission(RoleNeed('admin'))
manager_permission = Permission(RoleNeed('manager'))
class PermissionDenied(Forbidden):
pass
def has_manager_permissions():
return admin_permission.can() or manager_permission.can()
def check_manager_permissions():
if has_manager_permissions():
return True
else:
raise PermissionDenied
def check_admin_permissions():
if admin_permission.can():
return True
else:
raise PermissionDenied
|
from flask_principal import RoleNeed, Permission
from werkzeug.exceptions import Forbidden
admin_permission = Permission(RoleNeed('admin'))
manager_permission = Permission(RoleNeed('manager'))
class PermissionDenied(Forbidden):
pass
def has_manager_permissions():
"""
Return True if user is admin or manager.
"""
return admin_permission.can() or manager_permission.can()
def check_manager_permissions():
"""
Return True if user is admin or manager. It raises a PermissionDenied
exception in case of failure.
"""
if has_manager_permissions():
return True
else:
raise PermissionDenied
def check_admin_permissions():
"""
Return True if user is admin. It raises a PermissionDenied exception in case
of failure.
"""
if admin_permission.can():
return True
else:
raise PermissionDenied
|
Add comments to permission utils
|
Add comments to permission utils
|
Python
|
agpl-3.0
|
cgwire/zou
|
---
+++
@@ -10,10 +10,17 @@
def has_manager_permissions():
+ """
+ Return True if user is admin or manager.
+ """
return admin_permission.can() or manager_permission.can()
def check_manager_permissions():
+ """
+ Return True if user is admin or manager. It raises a PermissionDenied
+ exception in case of failure.
+ """
if has_manager_permissions():
return True
else:
@@ -21,6 +28,10 @@
def check_admin_permissions():
+ """
+ Return True if user is admin. It raises a PermissionDenied exception in case
+ of failure.
+ """
if admin_permission.can():
return True
else:
|
af27c17d3f2f13d66d188900bbac02975eb446b4
|
lanauth/app.py
|
lanauth/app.py
|
from flask import Flask, render_template, jsonify
from lanauth.api import load_api
class App(Flask):
"""Web application.
Routes:
/
/login Route to the main page
"""
def configure_views(self):
"""Configures core views"""
@self.route('/')
@self.route('/login')
@self.route("/guest/s/<site>")
def login(site):
"""Route to login (index) page"""
return render_template('login.html')
def app_factory(app_name, config, blueprints=None):
"""Build the webappi.
:param str app_name: Name of the Flask application
:param config: Site configuration
:param list blueprints: List of blueprint tuples to load formatted as:
(blueprint class, "end point")
"""
app = App(app_name)
app.config.update(config)
app.configure_views()
if blueprints is not None:
for blueprint, prefix in blueprints:
app.register_blueprint(blueprint, url_prefix=prefix)
load_api(app)
return app
|
from flask import Flask, render_template, jsonify
from lanauth.api import load_api
class App(Flask):
"""Web application.
Routes:
/
/login Route to the main page
"""
def configure_views(self):
"""Configures core views"""
@self.route('/')
@self.route('/login')
@self.route("/guest/s/<site>/")
def login(site = ""):
"""Route to login (index) page"""
return render_template('login.html')
def app_factory(app_name, config, blueprints=None):
"""Build the webappi.
:param str app_name: Name of the Flask application
:param config: Site configuration
:param list blueprints: List of blueprint tuples to load formatted as:
(blueprint class, "end point")
"""
app = App(app_name)
app.config.update(config)
app.configure_views()
if blueprints is not None:
for blueprint, prefix in blueprints:
app.register_blueprint(blueprint, url_prefix=prefix)
load_api(app)
return app
|
Set default site string value
|
Set default site string value
|
Python
|
mit
|
LSUCS/pyLanAuth,LSUCS/pyLanAuth,LSUCS/pyLanAuth,LSUCS/pyLanAuth
|
---
+++
@@ -16,8 +16,8 @@
@self.route('/')
@self.route('/login')
- @self.route("/guest/s/<site>")
- def login(site):
+ @self.route("/guest/s/<site>/")
+ def login(site = ""):
"""Route to login (index) page"""
return render_template('login.html')
|
2f0f34efa83736b1f09db1971ca17ab6d0006516
|
api/v2/views/image_bookmark.py
|
api/v2/views/image_bookmark.py
|
from core.models import ApplicationBookmark as ImageBookmark
from django.utils import timezone
from django.db.models import Q
from api.v2.serializers.details import ImageBookmarkSerializer
from api.v2.views.base import AuthViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageBookmarkViewSet(MultipleFieldLookup, AuthViewSet):
"""
API endpoint that allows instance actions to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
queryset = ImageBookmark.objects.all()
serializer_class = ImageBookmarkSerializer
http_method_names = ['get', 'post', 'delete', 'head', 'options', 'trace']
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get_queryset(self):
"""
Filter projects by current user
"""
user = self.request.user
now_time = timezone.now()
return ImageBookmark.objects.filter(user=user).filter(
Q(application__end_date__isnull=True) & Q(application__end_date__gt=now_time)
)
|
from core.models import ApplicationBookmark as ImageBookmark
from django.utils import timezone
from django.db.models import Q
from api.v2.serializers.details import ImageBookmarkSerializer
from api.v2.views.base import AuthViewSet
from api.v2.views.mixins import MultipleFieldLookup
class ImageBookmarkViewSet(MultipleFieldLookup, AuthViewSet):
"""
API endpoint that allows instance actions to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
queryset = ImageBookmark.objects.all()
serializer_class = ImageBookmarkSerializer
http_method_names = ['get', 'post', 'delete', 'head', 'options', 'trace']
def perform_create(self, serializer):
serializer.save(user=self.request.user)
def get_queryset(self):
"""
Filter projects by current user
"""
user = self.request.user
now_time = timezone.now()
return ImageBookmark.objects.filter(user=user).filter(
Q(application__end_date__isnull=True) | Q(application__end_date__gt=now_time)
)
|
Fix disappearing image bookmarks on refresh
|
Fix disappearing image bookmarks on refresh
The bookmark queryset was incorrect it had a filter that was returning an
empty list.
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
|
---
+++
@@ -28,5 +28,5 @@
user = self.request.user
now_time = timezone.now()
return ImageBookmark.objects.filter(user=user).filter(
- Q(application__end_date__isnull=True) & Q(application__end_date__gt=now_time)
+ Q(application__end_date__isnull=True) | Q(application__end_date__gt=now_time)
)
|
5f21b5f387e895a9af2ac8481bd495f2dacd6cdf
|
carbonate/util.py
|
carbonate/util.py
|
import fileinput
import os
import socket
import argparse
def local_addresses():
ips = socket.gethostbyname_ex(socket.gethostname())[2]
return set([ip for ip in ips if not ip.startswith("127.")][:1])
def common_parser(description='untitled'):
parser = argparse.ArgumentParser(
description=description,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'-c', '--config-file',
default='/opt/graphite/conf/carbonate.conf',
help='Config file to use')
parser.add_argument(
'-C', '--cluster',
default='main',
help='Cluster name')
return parser
def metrics_from_args(args):
arg = args.metrics_file
fi = arg if (arg and arg[0] != '-') else []
return map(lambda s: s.strip(), fileinput.input(fi))
def metric_to_fs(path, prepend=None):
filepath = path.replace('.', '/') + "." + "wsp"
if prepend:
filepath = os.path.join(prepend, filepath)
return filepath
def fs_to_metric(path, prepend=None):
if prepend:
path = path.replace(prepend, '')
return path.replace('.wsp', '').replace('/', '.').strip('.')
|
import fileinput
import os
import socket
import argparse
def local_addresses():
ips = socket.gethostbyname_ex(socket.gethostname())[2]
return set([ip for ip in ips if not ip.startswith("127.")][:1])
def common_parser(description='untitled'):
parser = argparse.ArgumentParser(
description=description,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
config_file = os.environ.get('CARBONATE_CONFIG',
'/opt/graphite/conf/carbonate.conf')
cluster = os.environ.get('CARBONATE_CLUSTER', 'main')
parser.add_argument(
'-c', '--config-file',
default=config_file,
help='Config file to use')
parser.add_argument(
'-C', '--cluster',
default=cluster,
help='Cluster name')
return parser
def metrics_from_args(args):
arg = args.metrics_file
fi = arg if (arg and arg[0] != '-') else []
return map(lambda s: s.strip(), fileinput.input(fi))
def metric_to_fs(path, prepend=None):
filepath = path.replace('.', '/') + "." + "wsp"
if prepend:
filepath = os.path.join(prepend, filepath)
return filepath
def fs_to_metric(path, prepend=None):
if prepend:
path = path.replace(prepend, '')
return path.replace('.wsp', '').replace('/', '.').strip('.')
|
Read config-file and cluster options from environ
|
Read config-file and cluster options from environ
This would make it DRY in scripts/wrappers with non-default values.
|
Python
|
mit
|
jssjr/carbonate,criteo-forks/carbonate,ross/carbonate,criteo-forks/carbonate,graphite-project/carbonate,unbrice/carbonate,criteo-forks/carbonate,deniszh/carbonate,graphite-project/carbonate,jssjr/carbonate,deniszh/carbonate,unbrice/carbonate,deniszh/carbonate,jssjr/carbonate,ross/carbonate,graphite-project/carbonate,unbrice/carbonate,ross/carbonate
|
---
+++
@@ -14,14 +14,18 @@
description=description,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ config_file = os.environ.get('CARBONATE_CONFIG',
+ '/opt/graphite/conf/carbonate.conf')
+ cluster = os.environ.get('CARBONATE_CLUSTER', 'main')
+
parser.add_argument(
'-c', '--config-file',
- default='/opt/graphite/conf/carbonate.conf',
+ default=config_file,
help='Config file to use')
parser.add_argument(
'-C', '--cluster',
- default='main',
+ default=cluster,
help='Cluster name')
return parser
|
4fb8fd95ce873deca56c66a3f7380b031864a879
|
lingcod/raster_stats/views.py
|
lingcod/raster_stats/views.py
|
# Create your views here.
from django.http import HttpResponse
from lingcod.raster_stats.models import zonal_stats, RasterDataset, ZonalStatsCache
from django.core import serializers
from django.contrib.gis.geos import fromstr
def stats_for_geom(request, raster_name):
# Confirm that we have a valid polygon geometry
if 'geom_txt' in request.REQUEST:
geom_txt = str(request.REQUEST['geom_txt'])
else:
return HttpResponse("Must supply a geom_txt parameter", status=404)
try:
geom = fromstr(geom_txt)
except:
return HttpResponse("Must supply a parsable geom_txt parameter (wkt or json)", status=404)
# Confirm raster with pk exists
try:
raster = RasterDataset.objects.get(name=raster_name)
except:
return HttpResponse("No raster with pk of %s" % pk, status=404)
#TODO check if continuous
zonal = zonal_stats(geom, raster)
zonal.save()
zqs = ZonalStatsCache.objects.filter(pk=zonal.pk)
data = serializers.serialize("json", zqs)
return HttpResponse(data, mimetype='application/javascript')
def raster_list(request):
rasts = RasterDataset.objects.all()
data = serializers.serialize("json", rasts)
return HttpResponse(data, mimetype='application/javascript')
|
# Create your views here.
from django.http import HttpResponse
from lingcod.raster_stats.models import zonal_stats, RasterDataset, ZonalStatsCache
from django.core import serializers
from django.contrib.gis.geos import fromstr
def stats_for_geom(request, raster_name):
# Confirm that we have a valid polygon geometry
if 'geom_txt' in request.REQUEST:
geom_txt = str(request.REQUEST['geom_txt'])
else:
return HttpResponse("Must supply a geom_txt parameter", status=404)
try:
geom = fromstr(geom_txt)
except:
return HttpResponse("Must supply a parsable geom_txt parameter (wkt or json)", status=404)
# Confirm raster with pk exists
try:
raster = RasterDataset.objects.get(name=raster_name)
except:
return HttpResponse("No raster with pk of %s" % pk, status=404)
#TODO check if continuous
zonal = zonal_stats(geom, raster)
zonal.save()
zqs = ZonalStatsCache.objects.filter(pk=zonal.pk)
data = serializers.serialize("json", zqs, fields=('avg','min','max','median','mode','stdev','nulls','pixels','date_modified','raster'))
return HttpResponse(data, mimetype='application/json')
def raster_list(request):
rasts = RasterDataset.objects.all()
data = serializers.serialize("json", rasts, fields=('name','type'))
return HttpResponse(data, mimetype='application/json')
|
Exclude certain fields from json serialization in raster_stats web service
|
Exclude certain fields from json serialization in raster_stats web service
|
Python
|
bsd-3-clause
|
Ecotrust/madrona_addons,Ecotrust/madrona_addons
|
---
+++
@@ -26,12 +26,10 @@
zonal = zonal_stats(geom, raster)
zonal.save()
zqs = ZonalStatsCache.objects.filter(pk=zonal.pk)
- data = serializers.serialize("json", zqs)
-
- return HttpResponse(data, mimetype='application/javascript')
+ data = serializers.serialize("json", zqs, fields=('avg','min','max','median','mode','stdev','nulls','pixels','date_modified','raster'))
+ return HttpResponse(data, mimetype='application/json')
def raster_list(request):
rasts = RasterDataset.objects.all()
- data = serializers.serialize("json", rasts)
- return HttpResponse(data, mimetype='application/javascript')
-
+ data = serializers.serialize("json", rasts, fields=('name','type'))
+ return HttpResponse(data, mimetype='application/json')
|
355b1a91edf2dfcff66c2a02e034977f65d0690c
|
influxdb/dataframe_client.py
|
influxdb/dataframe_client.py
|
# -*- coding: utf-8 -*-
"""
DataFrame client for InfluxDB
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__all__ = ['DataFrameClient']
try:
import pandas
del pandas
except ImportError as err:
from .client import InfluxDBClient
class DataFrameClient(InfluxDBClient):
def __init__(self, *a, **kw):
raise ImportError("DataFrameClient requires Pandas "
"which couldn't be imported: %s" % err)
else:
from ._dataframe_client import DataFrameClient
|
# -*- coding: utf-8 -*-
"""
DataFrame client for InfluxDB
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__all__ = ['DataFrameClient']
try:
import pandas
del pandas
except ImportError as err:
from .client import InfluxDBClient
class DataFrameClient(InfluxDBClient):
err = err
def __init__(self, *a, **kw):
raise ImportError("DataFrameClient requires Pandas "
"which couldn't be imported: %s" % self.err)
else:
from ._dataframe_client import DataFrameClient
|
Fix DataFrameClient import error on python3.5
|
Fix DataFrameClient import error on python3.5
|
Python
|
mit
|
BenHewins/influxdb-python,omki2005/influxdb-python,omki2005/influxdb-python,Asimmetric/influxdb-python,tzonghao/influxdb-python,influxdata/influxdb-python,Asimmetric/influxdb-python,tzonghao/influxdb-python,BenHewins/influxdb-python,influxdb/influxdb-python,influxdata/influxdb-python,influxdb/influxdb-python
|
---
+++
@@ -16,8 +16,10 @@
from .client import InfluxDBClient
class DataFrameClient(InfluxDBClient):
+ err = err
+
def __init__(self, *a, **kw):
raise ImportError("DataFrameClient requires Pandas "
- "which couldn't be imported: %s" % err)
+ "which couldn't be imported: %s" % self.err)
else:
from ._dataframe_client import DataFrameClient
|
853bf035fcb9ea21e648cb0b1d1b13ee68f8e9cc
|
importer/tests/test_utils.py
|
importer/tests/test_utils.py
|
from unittest import TestCase
from importer.utils import find_first
class FindFirstTestCase(TestCase):
def test_first_in_haystack(self):
self.assertEqual(
find_first(
['one', 'two', 'three'],
['one', 'four']
),
'one',
)
def test_second_in_haystack(self):
self.assertEqual(
find_first(
['one', 'two', 'three'],
['two', 'four']
),
'two',
)
def test_none_present(self):
self.assertIsNone(
find_first(
['one', 'two', 'three'],
['four']
)
)
|
from unittest import TestCase
from importer.utils import (
maybe,
find_first,
)
class MaybeTestCase(TestCase):
def setUp(self):
self.add_ten = maybe(lambda x: x + 10)
def test_with_none(self):
self.assertIsNone(self.add_ten(None))
def test_with_different_value(self):
self.assertEqual(self.add_ten(20), 30)
class FindFirstTestCase(TestCase):
def test_first_in_haystack(self):
self.assertEqual(
find_first(
['one', 'two', 'three'],
['one', 'four']
),
'one',
)
def test_second_in_haystack(self):
self.assertEqual(
find_first(
['one', 'two', 'three'],
['two', 'four']
),
'two',
)
def test_none_present(self):
self.assertIsNone(
find_first(
['one', 'two', 'three'],
['four']
)
)
|
Add tests for maybe decorator
|
Add tests for maybe decorator
|
Python
|
mit
|
despawnerer/theatrics,despawnerer/theatrics,despawnerer/theatrics
|
---
+++
@@ -1,6 +1,20 @@
from unittest import TestCase
-from importer.utils import find_first
+from importer.utils import (
+ maybe,
+ find_first,
+)
+
+
+class MaybeTestCase(TestCase):
+ def setUp(self):
+ self.add_ten = maybe(lambda x: x + 10)
+
+ def test_with_none(self):
+ self.assertIsNone(self.add_ten(None))
+
+ def test_with_different_value(self):
+ self.assertEqual(self.add_ten(20), 30)
class FindFirstTestCase(TestCase):
|
7e4227b304da4313a3114a95d361fc5bb6bf5529
|
runtests.py
|
runtests.py
|
from django.conf import settings
try:
import honeypot
except ImportError:
honeypot = None
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django_nose',
'envelope',
)
if honeypot:
INSTALLED_APPS += ('honeypot',)
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS = INSTALLED_APPS,
SITE_ID = 1,
ROOT_URLCONF = 'envelope.tests.urls',
HONEYPOT_FIELD_NAME = 'email2',
)
import django
try:
django.setup() # Django 1.7+
except AttributeError:
pass
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['envelope'])
|
from django.conf import settings
try:
import honeypot
except ImportError:
honeypot = None
if not settings.configured:
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django_nose',
'envelope',
)
if honeypot:
INSTALLED_APPS += ('honeypot',)
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
INSTALLED_APPS = INSTALLED_APPS,
SITE_ID = 1,
ROOT_URLCONF = 'envelope.tests.urls',
HONEYPOT_FIELD_NAME = 'email2',
PASSWORD_HASHERS = {
'django.contrib.auth.hashers.MD5PasswordHasher',
},
)
import django
try:
django.setup() # Django 1.7+
except AttributeError:
pass
from django_nose import NoseTestSuiteRunner
test_runner = NoseTestSuiteRunner()
test_runner.run_tests(['envelope'])
|
Test speedup by swapping password hasher.
|
Test speedup by swapping password hasher.
|
Python
|
mit
|
r4ts0n/django-envelope,zsiciarz/django-envelope,r4ts0n/django-envelope,zsiciarz/django-envelope
|
---
+++
@@ -29,6 +29,9 @@
SITE_ID = 1,
ROOT_URLCONF = 'envelope.tests.urls',
HONEYPOT_FIELD_NAME = 'email2',
+ PASSWORD_HASHERS = {
+ 'django.contrib.auth.hashers.MD5PasswordHasher',
+ },
)
|
2d4b6b1fa84f7530d064bfc5577d61430b4e5a34
|
run_multip.py
|
run_multip.py
|
# -*- coding:utf-8 -*-
from subprocess import check_output
from multiprocessing import Pool
from os import listdir, mkdir, path
from sys import argv
def run_simu((psfile, dir_runs)):
"""Runs the main script with specified parameter file."""
ndir = psfile[:-3]
mkdir(ndir)
simu_output = check_output(["python2.7", "multiglom_network.py", psfile,
"--full-ps"])
with open(path.join(ndir, "output.txt"), 'w') as f:
f.write(simu_output)
if __name__ == '__main__':
n_processes = int(argv[1])
dir_runs = argv[2]
psfiles = listdir(dir_runs)
psfile_filter = lambda fname: fname[-3:] == '.py' and fname != '__init__.py'
psfiles = filter(psfile_filter, psfiles)
psfiles = [(path.join(dir_runs, fname), dir_runs) for fname in psfiles]
pool = Pool(processes=n_processes)
pool.map(run_simu, psfiles)
|
# -*- coding:utf-8 -*-
"""
Script for running multiple simulation with different parameter set.
"""
from subprocess import check_output
from multiprocessing import Pool
from os import listdir, mkdir, path
from sys import argv
def run_simu((psfile, dir_runs)):
"""Runs the main script with specified parameter file."""
ndir = psfile[:-3]
mkdir(ndir)
simu_output = check_output(["python2.7", "multiglom_network.py", psfile,
"--full-ps"])
with open(path.join(ndir, "output.txt"), 'w') as f:
f.write(simu_output)
if __name__ == '__main__':
n_processes = int(argv[1])
dir_runs = argv[2]
psfiles = listdir(dir_runs)
psfile_filter = lambda fname: fname[-3:] == '.py' and fname != '__init__.py'
psfiles = filter(psfile_filter, psfiles)
psfiles = [(path.join(dir_runs, fname), dir_runs) for fname in psfiles]
pool = Pool(processes=n_processes)
pool.map(run_simu, psfiles)
|
Clean main multi simulation script
|
Clean main multi simulation script
|
Python
|
mit
|
neuro-lyon/multiglom-model,neuro-lyon/multiglom-model
|
---
+++
@@ -1,9 +1,13 @@
# -*- coding:utf-8 -*-
+"""
+Script for running multiple simulation with different parameter set.
+"""
from subprocess import check_output
from multiprocessing import Pool
from os import listdir, mkdir, path
from sys import argv
+
def run_simu((psfile, dir_runs)):
"""Runs the main script with specified parameter file."""
@@ -13,6 +17,7 @@
"--full-ps"])
with open(path.join(ndir, "output.txt"), 'w') as f:
f.write(simu_output)
+
if __name__ == '__main__':
n_processes = int(argv[1])
|
adb6c4c86407d6c66e8679aefdf2f2f0c9c87277
|
pltpreview/view.py
|
pltpreview/view.py
|
"""Convenience functions for matplotlib plotting and image viewing."""
import numpy as np
from matplotlib import pyplot as plt
def show(image, blocking=False, **kwargs):
"""Show *image*. If *blocking* is False the call is nonblocking.
*kwargs* are passed to matplotlib's ``imshow`` function. This command
always creates a new figure. Returns matplotlib's ``AxesImage``.
"""
plt.figure()
mpl_image = plt.imshow(image, **kwargs)
plt.colorbar(ticks=np.linspace(image.min(), image.max(), 8))
plt.show(blocking)
return mpl_image
def plot(*args, **kwargs):
"""Plot using matplotlib's ``plot`` function. Pass it *args* and *kwargs*.
*kwargs* are infected with *blocking* and if False or not specified,
the call is nonblocking. This command always creates a new figure.
"""
blocking = False if 'blocking' not in kwargs else kwargs.pop('blocking')
plt.figure()
plt.plot(*args, **kwargs)
plt.show(blocking)
|
"""Convenience functions for matplotlib plotting and image viewing."""
import numpy as np
from matplotlib import pyplot as plt
def show(image, blocking=False, **kwargs):
"""Show *image*. If *blocking* is False the call is nonblocking.
*kwargs* are passed to matplotlib's ``imshow`` function. This command
always creates a new figure. Returns matplotlib's ``AxesImage``.
"""
plt.figure()
mpl_image = plt.imshow(image, **kwargs)
plt.colorbar(ticks=np.linspace(image.min(), image.max(), 8))
plt.show(blocking)
return mpl_image
def plot(*args, **kwargs):
"""Plot using matplotlib's ``plot`` function. Pass it *args* and *kwargs*.
*kwargs* are infected with *blocking* and if False or not specified,
the call is nonblocking. This command always creates a new figure. Returns
a list of ``Line2D`` instances.
"""
blocking = False if 'blocking' not in kwargs else kwargs.pop('blocking')
plt.figure()
lines = plt.plot(*args, **kwargs)
plt.show(blocking)
return lines
|
Return lines from plot command
|
Return lines from plot command
|
Python
|
mit
|
tfarago/pltpreview
|
---
+++
@@ -19,9 +19,12 @@
def plot(*args, **kwargs):
"""Plot using matplotlib's ``plot`` function. Pass it *args* and *kwargs*.
*kwargs* are infected with *blocking* and if False or not specified,
- the call is nonblocking. This command always creates a new figure.
+ the call is nonblocking. This command always creates a new figure. Returns
+ a list of ``Line2D`` instances.
"""
blocking = False if 'blocking' not in kwargs else kwargs.pop('blocking')
plt.figure()
- plt.plot(*args, **kwargs)
+ lines = plt.plot(*args, **kwargs)
plt.show(blocking)
+
+ return lines
|
856e769c564673a9cf0fefb7ca4354cf75eb9155
|
access/tests/factories.py
|
access/tests/factories.py
|
import factory
from access.models import User
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = User
email = factory.Sequence(lambda n: 'user{}@example.com'.format(n))
|
import factory
from ..models import User
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = User
email = factory.Sequence(lambda n: 'user{}@example.com'.format(n))
|
Use relative import for User
|
Use relative import for User
|
Python
|
agpl-3.0
|
node13h/droll,node13h/droll
|
---
+++
@@ -1,6 +1,6 @@
import factory
-from access.models import User
+from ..models import User
class UserFactory(factory.django.DjangoModelFactory):
|
c61cace4106a8cf9a27099acbfae26ba2a727d65
|
install_steps/run_errands.py
|
install_steps/run_errands.py
|
import bosh_client
import os
import yaml
def do_step(context):
settings = context.meta['settings']
username = settings["username"]
home_dir = os.path.join("/home", username)
f = open('manifests/index.yml')
manifests = yaml.safe_load(f)
f.close()
client = bosh_client.BoshClient("https://10.0.0.4:25555", "admin", "admin")
for m in manifests['manifests']:
print "Running errands for {0}/manifests/{1}...".format(home_dir, m['file'])
for errand in m['errands']:
print "Running errand {0}".format(errand)
task_id = client.run_errand(m['deployment-name'], errand)
client.wait_for_task(task_id)
result = client.get_task_result(task_id)
print "Errand finished with exit code {0}".format(result['exit_code'])
print "=========== STDOUT ==========="
print result['stdout']
print "=========== STDERR ==========="
print result['stderr']
return context
|
import bosh_client
import os
import yaml
def do_step(context):
settings = context.meta['settings']
username = settings["username"]
home_dir = os.path.join("/home", username)
f = open('manifests/index.yml')
manifests = yaml.safe_load(f)
f.close()
client = bosh_client.BoshClient("https://10.0.0.4:25555", "admin", "admin")
for m in manifests['manifests']:
print "Running errands for {0}/manifests/{1}...".format(home_dir, m['file'])
for errand in m['errands']:
print "Running errand {0}".format(errand)
task_id = client.run_errand(m['deployment-name'], errand)
client.wait_for_task(task_id)
result = client.get_task_result(task_id)
print "Errand finished with exit code {0}".format(result['exit_code'])
print "=========== STDOUT ==========="
print result['stdout'].encode('utf8')
print "=========== STDERR ==========="
print result['stderr'].encode('utf8')
return context
|
Convert task output to UTF8
|
Convert task output to UTF8
|
Python
|
apache-2.0
|
cf-platform-eng/bosh-azure-template,cf-platform-eng/bosh-azure-template
|
---
+++
@@ -27,9 +27,9 @@
print "Errand finished with exit code {0}".format(result['exit_code'])
print "=========== STDOUT ==========="
- print result['stdout']
+ print result['stdout'].encode('utf8')
print "=========== STDERR ==========="
- print result['stderr']
+ print result['stderr'].encode('utf8')
return context
|
9a7de89fd4bc6d134f30bdee4c8a71b34e1e6ab9
|
stoq/__init__.py
|
stoq/__init__.py
|
#!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .core import Stoq
from .data_classes import (
ArchiverResponse,
ExtractedPayload,
Payload,
PayloadMeta,
PayloadResults,
RequestMeta,
StoqResponse,
WorkerResponse,
DispatcherResponse,
DeepDispatcherResponse,
DecoratorResponse,
)
from .exceptions import StoqException
__version__ = '2.0.0'
|
#!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .core import Stoq
from .data_classes import (
ArchiverResponse,
ExtractedPayload,
Payload,
PayloadMeta,
PayloadResults,
RequestMeta,
StoqResponse,
WorkerResponse,
DispatcherResponse,
DeepDispatcherResponse,
DecoratorResponse,
)
from .exceptions import StoqException, StoqPluginException
__version__ = '2.0.0'
|
Add StoqPluginException to default imports
|
Add StoqPluginException to default imports
|
Python
|
apache-2.0
|
PUNCH-Cyber/stoq
|
---
+++
@@ -28,6 +28,6 @@
DeepDispatcherResponse,
DecoratorResponse,
)
-from .exceptions import StoqException
+from .exceptions import StoqException, StoqPluginException
__version__ = '2.0.0'
|
ea02f8e714df34da0ff55a8c9750eb0d992875c2
|
feincms3/apps.py
|
feincms3/apps.py
|
# pragma: no cover
import warnings
import django
if django.VERSION < (3, 2):
from feincms3.applications import * # noqa
warnings.warn(
"Django 3.2 will start autodiscovering app configs inside '.apps' modules."
" We cannot continue using feincms3.apps because the AppsMixin inside this"
" module can only be loaded after Django initializes all apps."
" Please change feincms3.apps to feincms3.applications in your code."
" This compatibility shim will be removed at some point in the future."
" Sorry for the inconvenience.",
DeprecationWarning,
stacklevel=2,
)
|
import warnings
import django
if django.VERSION < (3, 2):
from feincms3.applications import * # noqa
warnings.warn(
"Django 3.2 will start autodiscovering app configs inside '.apps' modules."
" We cannot continue using feincms3.apps because the AppsMixin inside this"
" module can only be loaded after Django initializes all apps."
" Please change feincms3.apps to feincms3.applications in your code."
" This compatibility shim will be removed at some point in the future."
" Sorry for the inconvenience.",
DeprecationWarning,
stacklevel=2,
)
|
Remove a no cover pragma having no effect (since the pragma only affects the current line)
|
Remove a no cover pragma having no effect (since the pragma only affects the current line)
|
Python
|
bsd-3-clause
|
matthiask/feincms3,matthiask/feincms3,matthiask/feincms3
|
---
+++
@@ -1,5 +1,3 @@
-# pragma: no cover
-
import warnings
import django
|
bea83c533f65eeedae983b70fd41350e57df6908
|
cms/djangoapps/contentstore/features/video-editor.py
|
cms/djangoapps/contentstore/features/video-editor.py
|
# disable missing docstring
#pylint: disable=C0111
from lettuce import world, step
@step('I see the correct settings and default values$')
def i_see_the_correct_settings_and_values(step):
world.verify_all_setting_entries([['.75x', '', False],
['1.25x', '', False],
['1.5x', '', False],
['Display Name', 'default', True],
['Normal Speed', '', False],
['Show Captions', 'True', False],
['Source', '', False],
['Track', '', False]])
|
# disable missing docstring
#pylint: disable=C0111
from lettuce import world, step
@step('I see the correct settings and default values$')
def i_see_the_correct_settings_and_values(step):
world.verify_all_setting_entries([['Default Speed', '', False],
['Display Name', 'default', True],
['Download Track', '', False],
['Download Video', '', False],
['Show Captions', 'True', False],
['Speed: .75x', '', False],
['Speed: 1.25x', '', False],
['Speed: 1.5x', '', False]])
|
Update failing metadata settings acceptance test.
|
Update failing metadata settings acceptance test.
|
Python
|
agpl-3.0
|
bitifirefly/edx-platform,jonathan-beard/edx-platform,arbrandes/edx-platform,xuxiao19910803/edx-platform,zhenzhai/edx-platform,jazztpt/edx-platform,doismellburning/edx-platform,apigee/edx-platform,jbassen/edx-platform,inares/edx-platform,stvstnfrd/edx-platform,Edraak/edx-platform,a-parhom/edx-platform,etzhou/edx-platform,nanolearningllc/edx-platform-cypress-2,kxliugang/edx-platform,jelugbo/tundex,synergeticsedx/deployment-wipro,dcosentino/edx-platform,chudaol/edx-platform,prarthitm/edxplatform,defance/edx-platform,Stanford-Online/edx-platform,gsehub/edx-platform,appsembler/edx-platform,torchingloom/edx-platform,zofuthan/edx-platform,hamzehd/edx-platform,pepeportela/edx-platform,philanthropy-u/edx-platform,rhndg/openedx,ZLLab-Mooc/edx-platform,EduPepperPDTesting/pepper2013-testing,pelikanchik/edx-platform,BehavioralInsightsTeam/edx-platform,xinjiguaike/edx-platform,bitifirefly/edx-platform,sameetb-cuelogic/edx-platform-test,hamzehd/edx-platform,motion2015/edx-platform,wwj718/edx-platform,B-MOOC/edx-platform,wwj718/edx-platform,DNFcode/edx-platform,shabab12/edx-platform,jbzdak/edx-platform,jzoldak/edx-platform,hkawasaki/kawasaki-aio8-0,kalebhartje/schoolboost,Endika/edx-platform,ahmadiga/min_edx,jzoldak/edx-platform,motion2015/edx-platform,cyanna/edx-platform,procangroup/edx-platform,Unow/edx-platform,jelugbo/tundex,jruiperezv/ANALYSE,beacloudgenius/edx-platform,DefyVentures/edx-platform,franosincic/edx-platform,nanolearning/edx-platform,ahmadiga/min_edx,jswope00/GAI,shubhdev/edx-platform,SravanthiSinha/edx-platform,pabloborrego93/edx-platform,zadgroup/edx-platform,beni55/edx-platform,rismalrv/edx-platform,longmen21/edx-platform,EduPepperPD/pepper2013,morpheby/levelup-by,ahmedaljazzar/edx-platform,polimediaupv/edx-platform,abdoosh00/edraak,EDUlib/edx-platform,mjirayu/sit_academy,EduPepperPD/pepper2013,ovnicraft/edx-platform,tiagochiavericosta/edx-platform,yokose-ks/edx-platform,fly19890211/edx-platform,antonve/s4-project-mooc,lduarte1991/edx-platform,motion2015/edx-pla
tform,CourseTalk/edx-platform,wwj718/ANALYSE,angelapper/edx-platform,shabab12/edx-platform,waheedahmed/edx-platform,solashirai/edx-platform,kamalx/edx-platform,iivic/BoiseStateX,shashank971/edx-platform,shashank971/edx-platform,hkawasaki/kawasaki-aio8-1,nikolas/edx-platform,MSOpenTech/edx-platform,rismalrv/edx-platform,a-parhom/edx-platform,beacloudgenius/edx-platform,jamesblunt/edx-platform,devs1991/test_edx_docmode,mushtaqak/edx-platform,chrisndodge/edx-platform,jamesblunt/edx-platform,olexiim/edx-platform,raccoongang/edx-platform,martynovp/edx-platform,peterm-itr/edx-platform,stvstnfrd/edx-platform,sudheerchintala/LearnEraPlatForm,Semi-global/edx-platform,zubair-arbi/edx-platform,nanolearningllc/edx-platform-cypress,ovnicraft/edx-platform,ferabra/edx-platform,ak2703/edx-platform,etzhou/edx-platform,dsajkl/reqiop,OmarIthawi/edx-platform,edry/edx-platform,jolyonb/edx-platform,kalebhartje/schoolboost,RPI-OPENEDX/edx-platform,eemirtekin/edx-platform,Kalyzee/edx-platform,nttks/jenkins-test,appliedx/edx-platform,zubair-arbi/edx-platform,mahendra-r/edx-platform,xinjiguaike/edx-platform,Kalyzee/edx-platform,ahmedaljazzar/edx-platform,tanmaykm/edx-platform,alexthered/kienhoc-platform,ZLLab-Mooc/edx-platform,Kalyzee/edx-platform,syjeon/new_edx,jelugbo/tundex,martynovp/edx-platform,EduPepperPD/pepper2013,kmoocdev/edx-platform,tiagochiavericosta/edx-platform,halvertoluke/edx-platform,cpennington/edx-platform,shubhdev/edxOnBaadal,10clouds/edx-platform,RPI-OPENEDX/edx-platform,Edraak/edraak-platform,zhenzhai/edx-platform,angelapper/edx-platform,teltek/edx-platform,leansoft/edx-platform,jbassen/edx-platform,caesar2164/edx-platform,tiagochiavericosta/edx-platform,morenopc/edx-platform,chauhanhardik/populo,cselis86/edx-platform,zerobatu/edx-platform,vikas1885/test1,utecuy/edx-platform,amir-qayyum-khan/edx-platform,sudheerchintala/LearnEraPlatForm,andyzsf/edx,LearnEra/LearnEraPlaftform,simbs/edx-platform,jamiefolsom/edx-platform,jjmiranda/edx-platform,eemirtekin/edx-platform,kmooc
dev2/edx-platform,hkawasaki/kawasaki-aio8-0,TeachAtTUM/edx-platform,zofuthan/edx-platform,rationalAgent/edx-platform-custom,olexiim/edx-platform,Shrhawk/edx-platform,fly19890211/edx-platform,shubhdev/openedx,EduPepperPDTesting/pepper2013-testing,jswope00/griffinx,cecep-edu/edx-platform,benpatterson/edx-platform,peterm-itr/edx-platform,bitifirefly/edx-platform,ubc/edx-platform,IndonesiaX/edx-platform,jazztpt/edx-platform,xinjiguaike/edx-platform,unicri/edx-platform,fly19890211/edx-platform,ampax/edx-platform,gymnasium/edx-platform,knehez/edx-platform,franosincic/edx-platform,bitifirefly/edx-platform,UXE/local-edx,knehez/edx-platform,ubc/edx-platform,RPI-OPENEDX/edx-platform,itsjeyd/edx-platform,motion2015/a3,eduNEXT/edunext-platform,kamalx/edx-platform,dcosentino/edx-platform,hkawasaki/kawasaki-aio8-2,olexiim/edx-platform,pku9104038/edx-platform,appliedx/edx-platform,UOMx/edx-platform,kmoocdev2/edx-platform,vasyarv/edx-platform,vasyarv/edx-platform,TsinghuaX/edx-platform,jazztpt/edx-platform,UOMx/edx-platform,openfun/edx-platform,dkarakats/edx-platform,antonve/s4-project-mooc,jonathan-beard/edx-platform,jonathan-beard/edx-platform,torchingloom/edx-platform,playm2mboy/edx-platform,eestay/edx-platform,xingyepei/edx-platform,zerobatu/edx-platform,Ayub-Khan/edx-platform,atsolakid/edx-platform,jazkarta/edx-platform-for-isc,jbassen/edx-platform,chauhanhardik/populo_2,pabloborrego93/edx-platform,amir-qayyum-khan/edx-platform,naresh21/synergetics-edx-platform,cselis86/edx-platform,edry/edx-platform,utecuy/edx-platform,EduPepperPDTesting/pepper2013-testing,jamesblunt/edx-platform,TeachAtTUM/edx-platform,adoosii/edx-platform,longmen21/edx-platform,eemirtekin/edx-platform,ahmadio/edx-platform,TeachAtTUM/edx-platform,olexiim/edx-platform,mitocw/edx-platform,shubhdev/openedx,fintech-circle/edx-platform,dcosentino/edx-platform,OmarIthawi/edx-platform,dcosentino/edx-platform,vasyarv/edx-platform,zerobatu/edx-platform,abdoosh00/edx-rtl-final,vismartltd/edx-platform,kalebhartje/schoo
lboost,kamalx/edx-platform,beacloudgenius/edx-platform,chauhanhardik/populo_2,J861449197/edx-platform,hkawasaki/kawasaki-aio8-1,simbs/edx-platform,Unow/edx-platform,rhndg/openedx,WatanabeYasumasa/edx-platform,chauhanhardik/populo,Edraak/circleci-edx-platform,ahmadio/edx-platform,Lektorium-LLC/edx-platform,praveen-pal/edx-platform,jamiefolsom/edx-platform,antonve/s4-project-mooc,DNFcode/edx-platform,arifsetiawan/edx-platform,cognitiveclass/edx-platform,naresh21/synergetics-edx-platform,zadgroup/edx-platform,dkarakats/edx-platform,carsongee/edx-platform,doganov/edx-platform,defance/edx-platform,devs1991/test_edx_docmode,etzhou/edx-platform,shubhdev/edx-platform,solashirai/edx-platform,romain-li/edx-platform,IONISx/edx-platform,xinjiguaike/edx-platform,morenopc/edx-platform,eemirtekin/edx-platform,mjirayu/sit_academy,chudaol/edx-platform,hkawasaki/kawasaki-aio8-0,shubhdev/openedx,mcgachey/edx-platform,vismartltd/edx-platform,JCBarahona/edX,rue89-tech/edx-platform,morenopc/edx-platform,PepperPD/edx-pepper-platform,rationalAgent/edx-platform-custom,kalebhartje/schoolboost,jolyonb/edx-platform,deepsrijit1105/edx-platform,CourseTalk/edx-platform,IITBinterns13/edx-platform-dev,martynovp/edx-platform,shubhdev/edxOnBaadal,TsinghuaX/edx-platform,jruiperezv/ANALYSE,Lektorium-LLC/edx-platform,adoosii/edx-platform,atsolakid/edx-platform,prarthitm/edxplatform,itsjeyd/edx-platform,IndonesiaX/edx-platform,analyseuc3m/ANALYSE-v1,xuxiao19910803/edx,utecuy/edx-platform,vasyarv/edx-platform,utecuy/edx-platform,leansoft/edx-platform,rismalrv/edx-platform,torchingloom/edx-platform,halvertoluke/edx-platform,cpennington/edx-platform,adoosii/edx-platform,abdoosh00/edraak,Shrhawk/edx-platform,bdero/edx-platform,eduNEXT/edunext-platform,inares/edx-platform,vasyarv/edx-platform,Kalyzee/edx-platform,chand3040/cloud_that,J861449197/edx-platform,auferack08/edx-platform,rue89-tech/edx-platform,zofuthan/edx-platform,hkawasaki/kawasaki-aio8-0,mjg2203/edx-platform-seas,hkawasaki/kawasaki-aio8-2,AkA84/
edx-platform,mjg2203/edx-platform-seas,jruiperezv/ANALYSE,mtlchun/edx,rue89-tech/edx-platform,BehavioralInsightsTeam/edx-platform,MSOpenTech/edx-platform,pabloborrego93/edx-platform,arbrandes/edx-platform,syjeon/new_edx,marcore/edx-platform,zubair-arbi/edx-platform,eduNEXT/edx-platform,LearnEra/LearnEraPlaftform,valtech-mooc/edx-platform,pelikanchik/edx-platform,IONISx/edx-platform,devs1991/test_edx_docmode,unicri/edx-platform,Lektorium-LLC/edx-platform,alexthered/kienhoc-platform,vikas1885/test1,cecep-edu/edx-platform,romain-li/edx-platform,deepsrijit1105/edx-platform,don-github/edx-platform,pdehaye/theming-edx-platform,pepeportela/edx-platform,hkawasaki/kawasaki-aio8-1,vismartltd/edx-platform,abdoosh00/edraak,cpennington/edx-platform,Edraak/circleci-edx-platform,hastexo/edx-platform,IndonesiaX/edx-platform,ovnicraft/edx-platform,appliedx/edx-platform,dcosentino/edx-platform,nagyistoce/edx-platform,chauhanhardik/populo_2,rhndg/openedx,arbrandes/edx-platform,pomegranited/edx-platform,waheedahmed/edx-platform,SivilTaram/edx-platform,romain-li/edx-platform,UXE/local-edx,alu042/edx-platform,msegado/edx-platform,jruiperezv/ANALYSE,shabab12/edx-platform,philanthropy-u/edx-platform,beni55/edx-platform,DefyVentures/edx-platform,UOMx/edx-platform,synergeticsedx/deployment-wipro,ampax/edx-platform,nagyistoce/edx-platform,franosincic/edx-platform,SravanthiSinha/edx-platform,fly19890211/edx-platform,antonve/s4-project-mooc,10clouds/edx-platform,tanmaykm/edx-platform,edx/edx-platform,MakeHer/edx-platform,devs1991/test_edx_docmode,bigdatauniversity/edx-platform,a-parhom/edx-platform,fintech-circle/edx-platform,pepeportela/edx-platform,TsinghuaX/edx-platform,dsajkl/reqiop,jazztpt/edx-platform,cpennington/edx-platform,jzoldak/edx-platform,rhndg/openedx,eduNEXT/edunext-platform,doganov/edx-platform,eduNEXT/edx-platform,arifsetiawan/edx-platform,MakeHer/edx-platform,rationalAgent/edx-platform-custom,devs1991/test_edx_docmode,ovnicraft/edx-platform,edx-solutions/edx-platform,shubhdev
/edx-platform,halvertoluke/edx-platform,MakeHer/edx-platform,mjirayu/sit_academy,defance/edx-platform,zhenzhai/edx-platform,kamalx/edx-platform,alexthered/kienhoc-platform,ESOedX/edx-platform,knehez/edx-platform,Livit/Livit.Learn.EdX,philanthropy-u/edx-platform,jazkarta/edx-platform-for-isc,sameetb-cuelogic/edx-platform-test,shabab12/edx-platform,jswope00/griffinx,shubhdev/openedx,LICEF/edx-platform,xingyepei/edx-platform,morpheby/levelup-by,Softmotions/edx-platform,ampax/edx-platform,Semi-global/edx-platform,jjmiranda/edx-platform,yokose-ks/edx-platform,benpatterson/edx-platform,teltek/edx-platform,kxliugang/edx-platform,SravanthiSinha/edx-platform,mtlchun/edx,4eek/edx-platform,LICEF/edx-platform,jjmiranda/edx-platform,Edraak/edx-platform,bigdatauniversity/edx-platform,polimediaupv/edx-platform,appliedx/edx-platform,ak2703/edx-platform,Shrhawk/edx-platform,shubhdev/edxOnBaadal,CourseTalk/edx-platform,cecep-edu/edx-platform,SravanthiSinha/edx-platform,kxliugang/edx-platform,Endika/edx-platform,Kalyzee/edx-platform,y12uc231/edx-platform,chrisndodge/edx-platform,zadgroup/edx-platform,arifsetiawan/edx-platform,edx-solutions/edx-platform,ahmadio/edx-platform,devs1991/test_edx_docmode,defance/edx-platform,bigdatauniversity/edx-platform,yokose-ks/edx-platform,nanolearning/edx-platform,Shrhawk/edx-platform,valtech-mooc/edx-platform,syjeon/new_edx,pabloborrego93/edx-platform,WatanabeYasumasa/edx-platform,nttks/edx-platform,AkA84/edx-platform,louyihua/edx-platform,Ayub-Khan/edx-platform,jswope00/GAI,cecep-edu/edx-platform,Stanford-Online/edx-platform,halvertoluke/edx-platform,bigdatauniversity/edx-platform,etzhou/edx-platform,JioEducation/edx-platform,DNFcode/edx-platform,cognitiveclass/edx-platform,jzoldak/edx-platform,ahmedaljazzar/edx-platform,arifsetiawan/edx-platform,4eek/edx-platform,iivic/BoiseStateX,kursitet/edx-platform,chand3040/cloud_that,hmcmooc/muddx-platform,mcgachey/edx-platform,Shrhawk/edx-platform,nanolearningllc/edx-platform-cypress-2,edx-solutions/edx-plat
form,jazkarta/edx-platform,IONISx/edx-platform,J861449197/edx-platform,ferabra/edx-platform,DefyVentures/edx-platform,waheedahmed/edx-platform,shubhdev/edx-platform,gsehub/edx-platform,y12uc231/edx-platform,apigee/edx-platform,Semi-global/edx-platform,ferabra/edx-platform,auferack08/edx-platform,pdehaye/theming-edx-platform,jelugbo/tundex,jswope00/GAI,tanmaykm/edx-platform,IITBinterns13/edx-platform-dev,kmoocdev/edx-platform,teltek/edx-platform,romain-li/edx-platform,RPI-OPENEDX/edx-platform,andyzsf/edx,cselis86/edx-platform,atsolakid/edx-platform,morpheby/levelup-by,naresh21/synergetics-edx-platform,Edraak/circleci-edx-platform,fly19890211/edx-platform,marcore/edx-platform,atsolakid/edx-platform,mcgachey/edx-platform,angelapper/edx-platform,rismalrv/edx-platform,adoosii/edx-platform,xinjiguaike/edx-platform,bdero/edx-platform,edx/edx-platform,chudaol/edx-platform,wwj718/ANALYSE,MakeHer/edx-platform,valtech-mooc/edx-platform,SravanthiSinha/edx-platform,MSOpenTech/edx-platform,rismalrv/edx-platform,synergeticsedx/deployment-wipro,eestay/edx-platform,proversity-org/edx-platform,benpatterson/edx-platform,B-MOOC/edx-platform,cyanna/edx-platform,vismartltd/edx-platform,lduarte1991/edx-platform,antoviaque/edx-platform,waheedahmed/edx-platform,tiagochiavericosta/edx-platform,chauhanhardik/populo,nikolas/edx-platform,vikas1885/test1,xuxiao19910803/edx-platform,antonve/s4-project-mooc,jazkarta/edx-platform,cyanna/edx-platform,TsinghuaX/edx-platform,10clouds/edx-platform,mushtaqak/edx-platform,dsajkl/123,jruiperezv/ANALYSE,marcore/edx-platform,ahmadiga/min_edx,Endika/edx-platform,ESOedX/edx-platform,xuxiao19910803/edx,gymnasium/edx-platform,pdehaye/theming-edx-platform,ak2703/edx-platform,AkA84/edx-platform,B-MOOC/edx-platform,jjmiranda/edx-platform,inares/edx-platform,DNFcode/edx-platform,doganov/edx-platform,carsongee/edx-platform,SivilTaram/edx-platform,abdoosh00/edx-rtl-final,marcore/edx-platform,jbassen/edx-platform,IndonesiaX/edx-platform,morpheby/levelup-by,zadgroup/ed
x-platform,unicri/edx-platform,ahmadiga/min_edx,don-github/edx-platform,UOMx/edx-platform,vismartltd/edx-platform,Livit/Livit.Learn.EdX,pdehaye/theming-edx-platform,JioEducation/edx-platform,ZLLab-Mooc/edx-platform,antoviaque/edx-platform,cselis86/edx-platform,LICEF/edx-platform,etzhou/edx-platform,devs1991/test_edx_docmode,BehavioralInsightsTeam/edx-platform,openfun/edx-platform,chand3040/cloud_that,deepsrijit1105/edx-platform,alu042/edx-platform,kmoocdev2/edx-platform,doganov/edx-platform,franosincic/edx-platform,dsajkl/123,atsolakid/edx-platform,msegado/edx-platform,bdero/edx-platform,eestay/edx-platform,msegado/edx-platform,shurihell/testasia,kursitet/edx-platform,xingyepei/edx-platform,ESOedX/edx-platform,jolyonb/edx-platform,jazkarta/edx-platform,JCBarahona/edX,jonathan-beard/edx-platform,jazztpt/edx-platform,alexthered/kienhoc-platform,mbareta/edx-platform-ft,CredoReference/edx-platform,cyanna/edx-platform,Softmotions/edx-platform,fintech-circle/edx-platform,antoviaque/edx-platform,hmcmooc/muddx-platform,inares/edx-platform,jonathan-beard/edx-platform,IITBinterns13/edx-platform-dev,zofuthan/edx-platform,xuxiao19910803/edx-platform,caesar2164/edx-platform,OmarIthawi/edx-platform,PepperPD/edx-pepper-platform,sameetb-cuelogic/edx-platform-test,don-github/edx-platform,nanolearningllc/edx-platform-cypress,kxliugang/edx-platform,utecuy/edx-platform,bitifirefly/edx-platform,auferack08/edx-platform,sudheerchintala/LearnEraPlatForm,ahmedaljazzar/edx-platform,nikolas/edx-platform,nttks/edx-platform,playm2mboy/edx-platform,chand3040/cloud_that,mcgachey/edx-platform,motion2015/edx-platform,caesar2164/edx-platform,xuxiao19910803/edx-platform,abdoosh00/edraak,kursitet/edx-platform,tiagochiavericosta/edx-platform,alu042/edx-platform,zhenzhai/edx-platform,xuxiao19910803/edx-platform,ubc/edx-platform,LearnEra/LearnEraPlaftform,hastexo/edx-platform,ampax/edx-platform-backup,kamalx/edx-platform,Stanford-Online/edx-platform,morenopc/edx-platform,dsajkl/123,EDUlib/edx-platform,UX
E/local-edx,TeachAtTUM/edx-platform,don-github/edx-platform,beni55/edx-platform,eduNEXT/edunext-platform,xuxiao19910803/edx,shubhdev/openedx,hamzehd/edx-platform,doismellburning/edx-platform,openfun/edx-platform,peterm-itr/edx-platform,doismellburning/edx-platform,shurihell/testasia,ubc/edx-platform,solashirai/edx-platform,motion2015/a3,nagyistoce/edx-platform,jswope00/griffinx,cecep-edu/edx-platform,shurihell/testasia,y12uc231/edx-platform,nanolearningllc/edx-platform-cypress-2,yokose-ks/edx-platform,WatanabeYasumasa/edx-platform,valtech-mooc/edx-platform,nttks/edx-platform,halvertoluke/edx-platform,analyseuc3m/ANALYSE-v1,jamesblunt/edx-platform,chand3040/cloud_that,playm2mboy/edx-platform,leansoft/edx-platform,procangroup/edx-platform,appsembler/edx-platform,nttks/jenkins-test,proversity-org/edx-platform,torchingloom/edx-platform,edry/edx-platform,carsongee/edx-platform,hmcmooc/muddx-platform,DNFcode/edx-platform,ZLLab-Mooc/edx-platform,auferack08/edx-platform,cognitiveclass/edx-platform,motion2015/a3,jamesblunt/edx-platform,CredoReference/edx-platform,nttks/edx-platform,fintech-circle/edx-platform,CredoReference/edx-platform,J861449197/edx-platform,mitocw/edx-platform,mbareta/edx-platform-ft,jelugbo/tundex,doismellburning/edx-platform,JioEducation/edx-platform,kmoocdev/edx-platform,martynovp/edx-platform,SivilTaram/edx-platform,chrisndodge/edx-platform,motion2015/a3,vikas1885/test1,benpatterson/edx-platform,LICEF/edx-platform,wwj718/edx-platform,beni55/edx-platform,itsjeyd/edx-platform,apigee/edx-platform,eestay/edx-platform,inares/edx-platform,jamiefolsom/edx-platform,mbareta/edx-platform-ft,kmoocdev/edx-platform,gsehub/edx-platform,jamiefolsom/edx-platform,pomegranited/edx-platform,LICEF/edx-platform,synergeticsedx/deployment-wipro,JCBarahona/edX,louyihua/edx-platform,jswope00/griffinx,iivic/BoiseStateX,iivic/BoiseStateX,nanolearning/edx-platform,analyseuc3m/ANALYSE-v1,pku9104038/edx-platform,a-parhom/edx-platform,appsembler/edx-platform,jbassen/edx-platform,nt
tks/edx-platform,abdoosh00/edx-rtl-final,jbzdak/edx-platform,apigee/edx-platform,longmen21/edx-platform,lduarte1991/edx-platform,longmen21/edx-platform,hastexo/edx-platform,lduarte1991/edx-platform,deepsrijit1105/edx-platform,eduNEXT/edx-platform,ubc/edx-platform,nanolearningllc/edx-platform-cypress,hamzehd/edx-platform,ampax/edx-platform-backup,jazkarta/edx-platform-for-isc,WatanabeYasumasa/edx-platform,edry/edx-platform,beni55/edx-platform,pomegranited/edx-platform,proversity-org/edx-platform,solashirai/edx-platform,jswope00/griffinx,ahmadio/edx-platform,y12uc231/edx-platform,EduPepperPDTesting/pepper2013-testing,PepperPD/edx-pepper-platform,mahendra-r/edx-platform,doganov/edx-platform,jbzdak/edx-platform,mbareta/edx-platform-ft,morenopc/edx-platform,kalebhartje/schoolboost,jazkarta/edx-platform-for-isc,mitocw/edx-platform,rhndg/openedx,cyanna/edx-platform,ak2703/edx-platform,LearnEra/LearnEraPlaftform,jazkarta/edx-platform,PepperPD/edx-pepper-platform,praveen-pal/edx-platform,nikolas/edx-platform,IndonesiaX/edx-platform,msegado/edx-platform,4eek/edx-platform,gymnasium/edx-platform,proversity-org/edx-platform,cselis86/edx-platform,miptliot/edx-platform,nttks/jenkins-test,antoviaque/edx-platform,Edraak/edx-platform,beacloudgenius/edx-platform,UXE/local-edx,peterm-itr/edx-platform,amir-qayyum-khan/edx-platform,J861449197/edx-platform,louyihua/edx-platform,mushtaqak/edx-platform,xuxiao19910803/edx,zubair-arbi/edx-platform,unicri/edx-platform,mjirayu/sit_academy,kursitet/edx-platform,Edraak/edraak-platform,olexiim/edx-platform,jswope00/GAI,hkawasaki/kawasaki-aio8-1,mcgachey/edx-platform,gymnasium/edx-platform,eestay/edx-platform,edx-solutions/edx-platform,ampax/edx-platform-backup,polimediaupv/edx-platform,rue89-tech/edx-platform,pku9104038/edx-platform,nagyistoce/edx-platform,Endika/edx-platform,jazkarta/edx-platform-for-isc,martynovp/edx-platform,eduNEXT/edx-platform,mjg2203/edx-platform-seas,Edraak/edraak-platform,edx/edx-platform,vikas1885/test1,dkarakats/edx-plat
form,unicri/edx-platform,shubhdev/edxOnBaadal,shubhdev/edxOnBaadal,nagyistoce/edx-platform,playm2mboy/edx-platform,andyzsf/edx,hamzehd/edx-platform,OmarIthawi/edx-platform,sameetb-cuelogic/edx-platform-test,JCBarahona/edX,jbzdak/edx-platform,xingyepei/edx-platform,shashank971/edx-platform,zadgroup/edx-platform,prarthitm/edxplatform,Semi-global/edx-platform,Softmotions/edx-platform,ak2703/edx-platform,pku9104038/edx-platform,mjirayu/sit_academy,teltek/edx-platform,bigdatauniversity/edx-platform,AkA84/edx-platform,chauhanhardik/populo_2,Stanford-Online/edx-platform,Edraak/edx-platform,4eek/edx-platform,shashank971/edx-platform,IONISx/edx-platform,ahmadiga/min_edx,EduPepperPDTesting/pepper2013-testing,dsajkl/123,dkarakats/edx-platform,kxliugang/edx-platform,ESOedX/edx-platform,miptliot/edx-platform,nanolearningllc/edx-platform-cypress,ZLLab-Mooc/edx-platform,raccoongang/edx-platform,eemirtekin/edx-platform,bdero/edx-platform,alu042/edx-platform,dsajkl/reqiop,DefyVentures/edx-platform,Ayub-Khan/edx-platform,y12uc231/edx-platform,Ayub-Khan/edx-platform,Edraak/circleci-edx-platform,knehez/edx-platform,franosincic/edx-platform,EDUlib/edx-platform,doismellburning/edx-platform,chudaol/edx-platform,wwj718/edx-platform,BehavioralInsightsTeam/edx-platform,wwj718/edx-platform,motion2015/a3,stvstnfrd/edx-platform,devs1991/test_edx_docmode,zubair-arbi/edx-platform,rationalAgent/edx-platform-custom,leansoft/edx-platform,nanolearningllc/edx-platform-cypress-2,hmcmooc/muddx-platform,jbzdak/edx-platform,pomegranited/edx-platform,Edraak/edraak-platform,kmoocdev2/edx-platform,amir-qayyum-khan/edx-platform,simbs/edx-platform,torchingloom/edx-platform,edx/edx-platform,EduPepperPD/pepper2013,mtlchun/edx,knehez/edx-platform,chudaol/edx-platform,AkA84/edx-platform,EduPepperPD/pepper2013,MSOpenTech/edx-platform,pelikanchik/edx-platform,philanthropy-u/edx-platform,pepeportela/edx-platform,jolyonb/edx-platform,SivilTaram/edx-platform,louyihua/edx-platform,CredoReference/edx-platform,Unow/edx-pl
atform,gsehub/edx-platform,beacloudgenius/edx-platform,hastexo/edx-platform,nanolearning/edx-platform,chauhanhardik/populo_2,shurihell/testasia,10clouds/edx-platform,Edraak/circleci-edx-platform,JCBarahona/edX,procangroup/edx-platform,ahmadio/edx-platform,wwj718/ANALYSE,kmoocdev/edx-platform,xuxiao19910803/edx,hkawasaki/kawasaki-aio8-2,nanolearningllc/edx-platform-cypress,hkawasaki/kawasaki-aio8-2,solashirai/edx-platform,Edraak/edx-platform,pomegranited/edx-platform,mushtaqak/edx-platform,procangroup/edx-platform,ampax/edx-platform-backup,IITBinterns13/edx-platform-dev,andyzsf/edx,miptliot/edx-platform,openfun/edx-platform,Unow/edx-platform,B-MOOC/edx-platform,syjeon/new_edx,chauhanhardik/populo,adoosii/edx-platform,EduPepperPDTesting/pepper2013-testing,zofuthan/edx-platform,mahendra-r/edx-platform,xingyepei/edx-platform,DefyVentures/edx-platform,rationalAgent/edx-platform-custom,chauhanhardik/populo,Livit/Livit.Learn.EdX,wwj718/ANALYSE,prarthitm/edxplatform,mahendra-r/edx-platform,zerobatu/edx-platform,IONISx/edx-platform,miptliot/edx-platform,cognitiveclass/edx-platform,jazkarta/edx-platform,benpatterson/edx-platform,mitocw/edx-platform,edry/edx-platform,JioEducation/edx-platform,MakeHer/edx-platform,EDUlib/edx-platform,alexthered/kienhoc-platform,angelapper/edx-platform,raccoongang/edx-platform,nttks/jenkins-test,arifsetiawan/edx-platform,4eek/edx-platform,waheedahmed/edx-platform,openfun/edx-platform,appsembler/edx-platform,mtlchun/edx,shubhdev/edx-platform,ferabra/edx-platform,arbrandes/edx-platform,Lektorium-LLC/edx-platform,nanolearningllc/edx-platform-cypress-2,zerobatu/edx-platform,Softmotions/edx-platform,mushtaqak/edx-platform,PepperPD/edx-pepper-platform,Ayub-Khan/edx-platform,analyseuc3m/ANALYSE-v1,simbs/edx-platform,cognitiveclass/edx-platform,mjg2203/edx-platform-seas,SivilTaram/edx-platform,leansoft/edx-platform,shurihell/testasia,abdoosh00/edx-rtl-final,dkarakats/edx-platform,CourseTalk/edx-platform,ferabra/edx-platform,appliedx/edx-platform,praveen
-pal/edx-platform,shashank971/edx-platform,caesar2164/edx-platform,simbs/edx-platform,valtech-mooc/edx-platform,pelikanchik/edx-platform,naresh21/synergetics-edx-platform,mtlchun/edx,itsjeyd/edx-platform,polimediaupv/edx-platform,polimediaupv/edx-platform,nanolearning/edx-platform,tanmaykm/edx-platform,ampax/edx-platform-backup,yokose-ks/edx-platform,don-github/edx-platform,Semi-global/edx-platform,msegado/edx-platform,iivic/BoiseStateX,longmen21/edx-platform,ampax/edx-platform,stvstnfrd/edx-platform,wwj718/ANALYSE,romain-li/edx-platform,MSOpenTech/edx-platform,motion2015/edx-platform,zhenzhai/edx-platform,dsajkl/123,raccoongang/edx-platform,kursitet/edx-platform,Softmotions/edx-platform,carsongee/edx-platform,dsajkl/reqiop,jamiefolsom/edx-platform,Livit/Livit.Learn.EdX,mahendra-r/edx-platform,rue89-tech/edx-platform,sameetb-cuelogic/edx-platform-test,playm2mboy/edx-platform,RPI-OPENEDX/edx-platform,sudheerchintala/LearnEraPlatForm,kmoocdev2/edx-platform,ovnicraft/edx-platform,B-MOOC/edx-platform,nikolas/edx-platform,nttks/jenkins-test,chrisndodge/edx-platform,praveen-pal/edx-platform
|
---
+++
@@ -6,11 +6,11 @@
@step('I see the correct settings and default values$')
def i_see_the_correct_settings_and_values(step):
- world.verify_all_setting_entries([['.75x', '', False],
- ['1.25x', '', False],
- ['1.5x', '', False],
+ world.verify_all_setting_entries([['Default Speed', '', False],
['Display Name', 'default', True],
- ['Normal Speed', '', False],
+ ['Download Track', '', False],
+ ['Download Video', '', False],
['Show Captions', 'True', False],
- ['Source', '', False],
- ['Track', '', False]])
+ ['Speed: .75x', '', False],
+ ['Speed: 1.25x', '', False],
+ ['Speed: 1.5x', '', False]])
|
878de0641342b4f43ca6cfe48357e9e198bc696f
|
index.py
|
index.py
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# enable web debugging
import cgitb
cgitb.enable()
print("Content-Type: text/html;charset=utf-8")
print()
import formar.fweb.WebFormar
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# enable web debugging
import cgitb
cgitb.enable()
"""
from os import path
import sys
sys.path.append(path.abspath('../WebTagGenerator'))
"""
from os import path
import sys
sys.path.append(path.abspath('../WebTagGenerator/'))
print("Content-Type: text/html;charset=utf-8")
print()
print(sys.path)
import formar.fweb.WebFormar
|
Add external module reference to deal with html
|
Add external module reference to deal with html
|
Python
|
apache-2.0
|
ronengi/Formar,ronengi/Formar
|
---
+++
@@ -5,8 +5,19 @@
import cgitb
cgitb.enable()
+"""
+from os import path
+import sys
+sys.path.append(path.abspath('../WebTagGenerator'))
+"""
+from os import path
+import sys
+sys.path.append(path.abspath('../WebTagGenerator/'))
+
+
print("Content-Type: text/html;charset=utf-8")
print()
+print(sys.path)
import formar.fweb.WebFormar
|
6b6fb12545ff3b35420fd2382576a6150044bdc8
|
kerze.py
|
kerze.py
|
from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
SHAPE = "turtle"
fillcolor(FARBE)
shape(SHAPE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
zeichneKerze(True)
hideturtle()
|
from turtle import *
GROESSE = 0.5
FARBE = "red"
FAERBEN = True
SHAPE = "turtle"
fillcolor(FARBE)
shape(SHAPE)
def zeichneKerze(brennt):
pd()
begin_fill()
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
right(90)
forward(GROESSE*30)
back(GROESSE*30)
left(90)
forward(GROESSE*100)
left(90)
forward(GROESSE*400)
left(90)
forward(GROESSE*100)
end_fill()
pu()
if brennt:
zeichneFlamme()
def zeichneFlamme():
left(90)
fd(GROESSE*430)
pd()
color("yellow")
dot(GROESSE*60)
color("black")
back(GROESSE*30)
pu()
home()
if __name__=="__main__":
zeichneKerze(True)
hideturtle()
|
Prepare for use as module
|
Prepare for use as module
|
Python
|
mit
|
luforst/adventskranz
|
---
+++
@@ -41,6 +41,6 @@
pu()
home()
-##zeichneKerze(brennt=False) # testweise erstmal nur nicht brennende Kerze
-zeichneKerze(True)
-hideturtle()
+if __name__=="__main__":
+ zeichneKerze(True)
+ hideturtle()
|
7e9f765b90df14b66a361c6cf9fe25a38189475a
|
actions/server/extend_expiration_date.py
|
actions/server/extend_expiration_date.py
|
import datetime
from utilities.logger import ThreadLogger
from utilities.mail import send_mail
from utilities.mail import InvalidConfigurationException
"""
Server action to extend expiration date on a server by 30 days
"""
def run(job, logger=None):
# Extend Server Expiration Date
server = job.server_set.first()
new_date = server.expiration_date + datetime.timedelta(days=30)
server.set_value_for_custom_field("expiration_date", new_date)
# Notify Approver
email_body = (
'{} has extended {}\'s expiration date by 30 days.'.format(job.owner, server.hostname)
)
emails = []
for approver in server.group.approvers.all():
emails.append(approver.user.email)
subject = 'CloudBolt: Server expiration extended by 30 days.'
try:
send_mail(subject, email_body, None, emails)
except InvalidConfigurationException:
logger.debug('Cannot connect to email (SMTP) server')
return "", "", ""
|
"""
Server Action to extend the expiration date of a Server by 30 days and notify
Approvers in the Server's Group.
"""
import datetime
from utilities.logger import ThreadLogger
from utilities.mail import send_mail, InvalidConfigurationException
logger = ThreadLogger(__name__)
def run(job, logger=None):
# Extend Server Expiration Date
server = job.server_set.first()
# If the server doesn't have an expiration date, this Server Action will
# quit and _not_ assign it one.
if server.expiration_date is None:
return "", "This server does not have an expiration date.", ""
new_date = server.expiration_date + datetime.timedelta(days=30)
server.set_value_for_custom_field("expiration_date", new_date)
# Notify Approvers
email_body = (
f"{job.owner} has extended {server.hostname}'s expiration date by 30 days."
)
email_addrs = [approver.user.email for approver in server.group.get_approvers()]
subject = "CloudBolt: Server expiration extended by 30 days."
try:
send_mail(subject, email_body, None, email_addrs)
except InvalidConfigurationException:
logger.debug("Cannot connect to email (SMTP) server")
return "", "", ""
|
Fix "Extend Expiration Date" Server Action
|
Fix "Extend Expiration Date" Server Action
Server Action had bugs where server.expiration_date would cause an
exception if it wasn't set, and was using the old method
`group.approvers`. These issues have been fixed, and the plugin is more
stable.
[DEV-13752]
|
Python
|
apache-2.0
|
CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge
|
---
+++
@@ -1,28 +1,36 @@
+"""
+Server Action to extend the expiration date of a Server by 30 days and notify
+Approvers in the Server's Group.
+"""
+
import datetime
+
from utilities.logger import ThreadLogger
-from utilities.mail import send_mail
-from utilities.mail import InvalidConfigurationException
+from utilities.mail import send_mail, InvalidConfigurationException
-"""
-Server action to extend expiration date on a server by 30 days
-"""
+logger = ThreadLogger(__name__)
+
def run(job, logger=None):
# Extend Server Expiration Date
server = job.server_set.first()
+
+ # If the server doesn't have an expiration date, this Server Action will
+ # quit and _not_ assign it one.
+ if server.expiration_date is None:
+ return "", "This server does not have an expiration date.", ""
+
new_date = server.expiration_date + datetime.timedelta(days=30)
server.set_value_for_custom_field("expiration_date", new_date)
-
- # Notify Approver
+
+ # Notify Approvers
email_body = (
- '{} has extended {}\'s expiration date by 30 days.'.format(job.owner, server.hostname)
+ f"{job.owner} has extended {server.hostname}'s expiration date by 30 days."
)
- emails = []
- for approver in server.group.approvers.all():
- emails.append(approver.user.email)
- subject = 'CloudBolt: Server expiration extended by 30 days.'
+ email_addrs = [approver.user.email for approver in server.group.get_approvers()]
+ subject = "CloudBolt: Server expiration extended by 30 days."
try:
- send_mail(subject, email_body, None, emails)
+ send_mail(subject, email_body, None, email_addrs)
except InvalidConfigurationException:
- logger.debug('Cannot connect to email (SMTP) server')
+ logger.debug("Cannot connect to email (SMTP) server")
return "", "", ""
|
204c955b7150e3ef26f44e1982359b2d6096eebe
|
lc053_maximum_subarray.py
|
lc053_maximum_subarray.py
|
"""Leetcode 53. Maximum Subarray
Easy
Given an integer array nums, find the contiguous subarray
(containing at least one number) which has the largest sum and return its sum.
Example:
Input: [-2,1,-3,4,-1,2,1,-5,4],
Output: 6
Explanation: [4,-1,2,1] has the largest sum = 6.
Follow up:
If you have figured out the O(n) solution, try coding another solution using
the divide and conquer approach, which is more subtle.
"""
class Solution(object):
def maxSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
pass
def main():
nums = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
# Output: 6.
if __name__ == '__main__':
main()
|
"""Leetcode 53. Maximum Subarray
Easy
Given an integer array nums, find the contiguous subarray
(containing at least one number) which has the largest sum and return its sum.
Example:
Input: [-2,1,-3,4,-1,2,1,-5,4],
Output: 6
Explanation: [4,-1,2,1] has the largest sum = 6.
Follow up:
If you have figured out the O(n) solution, try coding another solution using
the divide and conquer approach, which is more subtle.
"""
class SolutionDp(object):
def maxSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
Maximum subarray sum by Kadane's algorithm.
"""
sums = [0] * len(nums)
sums[0] = nums[0]
max_sum = sums[0]
for i in range(1, len(sums)):
# Compute current max subarray sum before pos i.
sums[i] = max(sums[i - 1] + nums[i], nums[i])
# Track global max sum before pos i.
max_sum = max(max_sum, sums[i])
return max_sum
def main():
nums = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
# Output: 6.
print SolutionDp().maxSubArray(nums)
if __name__ == '__main__':
main()
|
Complete max subarray sum by DP
|
Complete max subarray sum by DP
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
---
+++
@@ -15,19 +15,33 @@
the divide and conquer approach, which is more subtle.
"""
-class Solution(object):
+class SolutionDp(object):
def maxSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
+
+ Maximum subarray sum by Kadane's algorithm.
"""
- pass
+ sums = [0] * len(nums)
+ sums[0] = nums[0]
+ max_sum = sums[0]
+
+ for i in range(1, len(sums)):
+ # Compute current max subarray sum before pos i.
+ sums[i] = max(sums[i - 1] + nums[i], nums[i])
+ # Track global max sum before pos i.
+ max_sum = max(max_sum, sums[i])
+
+ return max_sum
def main():
nums = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
# Output: 6.
+ print SolutionDp().maxSubArray(nums)
+
if __name__ == '__main__':
main()
|
5c7161858fa7ca2962f08b66f6d20ae49715c206
|
ci_scripts/buildLinuxWheels.py
|
ci_scripts/buildLinuxWheels.py
|
from subprocess import call, check_output
import sys
isPython3 = sys.version_info.major == 3
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
if b'build wheels' not in out.lower() or not isPython3:
exit(0)
path = os.path.abspath(argv[1])
call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(path).split())
from dropboxUpload import uploadAll
uploadAll(path)
|
from subprocess import call, check_output
import sys
import os
isPython3 = sys.version_info.major == 3
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
if b'build wheels' not in out.lower() or not isPython3:
exit(0)
path = os.path.abspath(argv[1])
call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(path).split())
from dropboxUpload import uploadAll
uploadAll(path)
|
Fix build wheels and upload 3.
|
Fix build wheels and upload 3.
|
Python
|
bsd-3-clause
|
jr-garcia/AssimpCy,jr-garcia/AssimpCy
|
---
+++
@@ -1,5 +1,6 @@
from subprocess import call, check_output
import sys
+import os
isPython3 = sys.version_info.major == 3
|
7f6cd8f5444d92644642cadb84d7f958e0b6fce1
|
examples/image_test.py
|
examples/image_test.py
|
import sys
import os
import pyglet.window
from pyglet.gl import *
from pyglet import clock
from pyglet.ext.scene2d import Image2d
from ctypes import *
if len(sys.argv) != 2:
print 'Usage: %s <PNG/JPEG filename>'%sys.argv[0]
sys.exit()
window = pyglet.window.Window(width=400, height=400)
image = Image2d.load(sys.argv[1])
s = max(image.width, image.height)
c = clock.Clock(60)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(60., 1., 1., 100.)
glEnable(GL_COLOR_MATERIAL)
glMatrixMode(GL_MODELVIEW)
glClearColor(0, 0, 0, 0)
glColor4f(1, 1, 1, 1)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_BLEND)
while not window.has_exit:
c.tick()
window.dispatch_events()
glClear(GL_COLOR_BUFFER_BIT)
glLoadIdentity()
glScalef(1./s, 1./s, 1.)
glTranslatef(-image.width/2, -image.height/2, -1.)
image.draw()
window.flip()
|
import sys
import os
import ctypes
import pyglet.window
from pyglet.gl import *
from pyglet import clock
from pyglet import image
if len(sys.argv) != 2:
print 'Usage: %s <PNG/JPEG filename>'%sys.argv[0]
sys.exit()
window = pyglet.window.Window(width=400, height=400)
image = image.load(sys.argv[1])
imx = imy = 0
@window.event
def on_mouse_drag(x, y, dx, dy, buttons, modifiers):
global imx, imy
imx += dx
imy += dy
clock.set_fps_limit(30)
while not window.has_exit:
clock.tick()
window.dispatch_events()
glClear(GL_COLOR_BUFFER_BIT)
image.blit(imx, imy, 0)
window.flip()
|
Use the core, make example more useful.
|
Use the core, make example more useful.
git-svn-id: d4fdfcd4de20a449196f78acc655f735742cd30d@874 14d46d22-621c-0410-bb3d-6f67920f7d95
|
Python
|
bsd-3-clause
|
regular/pyglet-avbin-optimizations,regular/pyglet-avbin-optimizations,regular/pyglet-avbin-optimizations,regular/pyglet-avbin-optimizations
|
---
+++
@@ -1,46 +1,30 @@
import sys
import os
+import ctypes
import pyglet.window
from pyglet.gl import *
from pyglet import clock
-from pyglet.ext.scene2d import Image2d
-
-from ctypes import *
+from pyglet import image
if len(sys.argv) != 2:
print 'Usage: %s <PNG/JPEG filename>'%sys.argv[0]
sys.exit()
window = pyglet.window.Window(width=400, height=400)
+image = image.load(sys.argv[1])
+imx = imy = 0
+@window.event
+def on_mouse_drag(x, y, dx, dy, buttons, modifiers):
+ global imx, imy
+ imx += dx
+ imy += dy
-image = Image2d.load(sys.argv[1])
-s = max(image.width, image.height)
-
-c = clock.Clock(60)
-
-glMatrixMode(GL_PROJECTION)
-glLoadIdentity()
-gluPerspective(60., 1., 1., 100.)
-glEnable(GL_COLOR_MATERIAL)
-
-glMatrixMode(GL_MODELVIEW)
-glClearColor(0, 0, 0, 0)
-glColor4f(1, 1, 1, 1)
-
-glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
-glEnable(GL_BLEND)
-
+clock.set_fps_limit(30)
while not window.has_exit:
- c.tick()
+ clock.tick()
window.dispatch_events()
-
glClear(GL_COLOR_BUFFER_BIT)
- glLoadIdentity()
-
- glScalef(1./s, 1./s, 1.)
- glTranslatef(-image.width/2, -image.height/2, -1.)
- image.draw()
-
+ image.blit(imx, imy, 0)
window.flip()
|
3ddbff7204c8458be777cdf194ce38170c1982d9
|
learntools/computer_vision/ex4.py
|
learntools/computer_vision/ex4.py
|
from learntools.core import *
import tensorflow as tf
class Q1A(ThoughtExperiment):
_solution = ""
class Q1B(ThoughtExperiment):
_solution = ""
Q1 = MultipartProblem(Q1A, Q1B)
class Q2A(ThoughtExperiment):
_hint = "Stacking the second layer expanded the receptive field by one neuron on each side, giving $3+1+1=5$ for each dimension. If you expanded by one neuron again, what would you get?"
_solution = "The third layer would have a $7\times 7$ receptive field."
class Q2B(ThoughtExperiment):
_hint = "This pooling layer collapses a $2\times 2$ patch into a single pixel, effectively *doubling* the number of connections along each dimension. "
_solution = "Doubling a $7 \times 7$ field produces a $14 \times 14$ field for the final outputs."
Q2 = MultipartProblem(Q2A, Q2B)
class Q3(CodingProblem):
_hint = "You just need a list of numbers, maybe three to five."
_solution = CS("""
kernel = tf.constant([0.1, 0.2, 0.3, 0.4])
""")
def check(self):
pass
qvars = bind_exercises(globals(), [
Q1, Q2, Q3,
],
var_format='q_{n}',
)
__all__ = list(qvars)
|
from learntools.core import *
import tensorflow as tf
class Q1A(ThoughtExperiment):
_solution = ""
class Q1B(ThoughtExperiment):
_solution = ""
Q1 = MultipartProblem(Q1A, Q1B)
class Q2A(ThoughtExperiment):
_hint = r"Stacking the second layer expanded the receptive field by one neuron on each side, giving $3+1+1=5$ for each dimension. If you expanded by one neuron again, what would you get?"
_solution = r"The third layer would have a $7 \times 7$ receptive field."
class Q2B(ThoughtExperiment):
_hint = r"This pooling layer collapses a $2 \times 2$ patch into a single pixel, effectively *doubling* the number of connections along each dimension. "
_solution = r"Doubling a $7 \times 7$ field produces a $14 \times 14$ field for the final outputs."
Q2 = MultipartProblem(Q2A, Q2B)
class Q3(CodingProblem):
_hint = "You just need a list of numbers, maybe three to five."
_solution = CS("""
kernel = tf.constant([0.1, 0.2, 0.3, 0.4])
""")
def check(self):
pass
qvars = bind_exercises(globals(), [
Q1, Q2, Q3,
],
var_format='q_{n}',
)
__all__ = list(qvars)
|
Change to raw strings in exercise 4 checking
|
Change to raw strings in exercise 4 checking
|
Python
|
apache-2.0
|
Kaggle/learntools,Kaggle/learntools
|
---
+++
@@ -12,12 +12,12 @@
Q1 = MultipartProblem(Q1A, Q1B)
class Q2A(ThoughtExperiment):
- _hint = "Stacking the second layer expanded the receptive field by one neuron on each side, giving $3+1+1=5$ for each dimension. If you expanded by one neuron again, what would you get?"
- _solution = "The third layer would have a $7\times 7$ receptive field."
+ _hint = r"Stacking the second layer expanded the receptive field by one neuron on each side, giving $3+1+1=5$ for each dimension. If you expanded by one neuron again, what would you get?"
+ _solution = r"The third layer would have a $7 \times 7$ receptive field."
class Q2B(ThoughtExperiment):
- _hint = "This pooling layer collapses a $2\times 2$ patch into a single pixel, effectively *doubling* the number of connections along each dimension. "
- _solution = "Doubling a $7 \times 7$ field produces a $14 \times 14$ field for the final outputs."
+ _hint = r"This pooling layer collapses a $2 \times 2$ patch into a single pixel, effectively *doubling* the number of connections along each dimension. "
+ _solution = r"Doubling a $7 \times 7$ field produces a $14 \times 14$ field for the final outputs."
Q2 = MultipartProblem(Q2A, Q2B)
|
fffce05288a80b4d4e8e45b6a1282eaa6c2d80c4
|
dec02/dec02part2.py
|
dec02/dec02part2.py
|
# Advent of Code
# Dec 2, Part 2
# @geekygirlsarah
|
# Advent of Code
# Dec 2, Part 1
# @geekygirlsarah
inputFile = "input.txt"
# Tracking vars
finalCode = ""
lastNumber = 5 # start here
tempNumber = lastNumber
with open(inputFile) as f:
while True:
line = f.readline(-1)
if not line:
# print "End of file"
break
# print ("Line: ", line)
print ("First number=" + str(lastNumber))
for dir in line:
print("dir=" + dir)
if dir == "U":
if lastNumber == 3:
tempNumber = lastNumber - 2
if lastNumber > 5 and lastNumber < 9:
tempNumber = lastNumber - 4
if lastNumber >= 10 and lastNumber <= 12:
tempNumber = lastNumber - 4
if lastNumber == 13:
tempNumber = lastNumber - 2
elif dir == "D":
if lastNumber == 1:
tempNumber = lastNumber + 2
if lastNumber >= 2 and lastNumber <= 4:
tempNumber = lastNumber + 4
if lastNumber >= 6 and lastNumber <= 8:
tempNumber = lastNumber + 4
if lastNumber == 11:
tempNumber = lastNumber + 2
elif dir == "L":
if lastNumber == 6:
tempNumber = lastNumber - 1
if lastNumber == 3 or lastNumber == 7 or lastNumber == 11:
tempNumber = lastNumber - 1
if lastNumber == 4 or lastNumber == 8 or lastNumber == 12:
tempNumber = lastNumber - 1
if lastNumber == 9:
tempNumber = lastNumber - 1
elif dir == "R":
if lastNumber == 5:
tempNumber = lastNumber + 1
if lastNumber == 2 or lastNumber == 6 or lastNumber == 10:
tempNumber = lastNumber + 1
if lastNumber == 3 or lastNumber == 7 or lastNumber == 11:
tempNumber = lastNumber + 1
if lastNumber == 8:
tempNumber = lastNumber + 1
elif dir == "\n":
break
lastNumber = tempNumber
print ("New number: " + str(lastNumber))
# last number validated, so add to code
lastChar = str(lastNumber)
if lastNumber == 10:
lastChar = "A"
elif lastNumber == 11:
lastChar = "B"
elif lastNumber == 12:
lastChar = "C"
elif lastNumber == 13:
lastChar = "D"
finalCode = finalCode + lastChar
print("Final code: " + finalCode)
|
Add 12/2 part 2 solution
|
Add 12/2 part 2 solution
|
Python
|
mit
|
geekygirlsarah/adventofcode2016
|
---
+++
@@ -1,4 +1,78 @@
# Advent of Code
-# Dec 2, Part 2
+# Dec 2, Part 1
# @geekygirlsarah
+inputFile = "input.txt"
+
+# Tracking vars
+finalCode = ""
+lastNumber = 5 # start here
+tempNumber = lastNumber
+
+with open(inputFile) as f:
+ while True:
+ line = f.readline(-1)
+ if not line:
+ # print "End of file"
+ break
+ # print ("Line: ", line)
+
+ print ("First number=" + str(lastNumber))
+ for dir in line:
+ print("dir=" + dir)
+ if dir == "U":
+ if lastNumber == 3:
+ tempNumber = lastNumber - 2
+ if lastNumber > 5 and lastNumber < 9:
+ tempNumber = lastNumber - 4
+ if lastNumber >= 10 and lastNumber <= 12:
+ tempNumber = lastNumber - 4
+ if lastNumber == 13:
+ tempNumber = lastNumber - 2
+ elif dir == "D":
+ if lastNumber == 1:
+ tempNumber = lastNumber + 2
+ if lastNumber >= 2 and lastNumber <= 4:
+ tempNumber = lastNumber + 4
+ if lastNumber >= 6 and lastNumber <= 8:
+ tempNumber = lastNumber + 4
+ if lastNumber == 11:
+ tempNumber = lastNumber + 2
+ elif dir == "L":
+ if lastNumber == 6:
+ tempNumber = lastNumber - 1
+ if lastNumber == 3 or lastNumber == 7 or lastNumber == 11:
+ tempNumber = lastNumber - 1
+ if lastNumber == 4 or lastNumber == 8 or lastNumber == 12:
+ tempNumber = lastNumber - 1
+ if lastNumber == 9:
+ tempNumber = lastNumber - 1
+ elif dir == "R":
+ if lastNumber == 5:
+ tempNumber = lastNumber + 1
+ if lastNumber == 2 or lastNumber == 6 or lastNumber == 10:
+ tempNumber = lastNumber + 1
+ if lastNumber == 3 or lastNumber == 7 or lastNumber == 11:
+ tempNumber = lastNumber + 1
+ if lastNumber == 8:
+ tempNumber = lastNumber + 1
+ elif dir == "\n":
+ break
+
+ lastNumber = tempNumber
+ print ("New number: " + str(lastNumber))
+
+
+ # last number validated, so add to code
+ lastChar = str(lastNumber)
+ if lastNumber == 10:
+ lastChar = "A"
+ elif lastNumber == 11:
+ lastChar = "B"
+ elif lastNumber == 12:
+ lastChar = "C"
+ elif lastNumber == 13:
+ lastChar = "D"
+ finalCode = finalCode + lastChar
+
+print("Final code: " + finalCode)
|
5a806254b63b8cc1732dfc4a797371b9a52c8621
|
mediacloud/mediawords/util/test_process.py
|
mediacloud/mediawords/util/test_process.py
|
import subprocess
from mediawords.util.process import *
def test_process_with_pid_is_running():
test_process = subprocess.Popen(['sleep', '999'])
test_process_pid = test_process.pid
assert test_process_pid != 0
assert test_process_pid is not None
assert process_with_pid_is_running(test_process_pid) is True
# again to test if os.kill() just tests the process, not actually kills it
assert process_with_pid_is_running(test_process_pid) is True
test_process.kill()
assert process_with_pid_is_running(test_process_pid) is False
|
import subprocess
from mediawords.util.process import *
def test_process_with_pid_is_running():
test_process = subprocess.Popen(['sleep', '999'])
test_process_pid = test_process.pid
assert test_process_pid != 0
assert test_process_pid is not None
assert process_with_pid_is_running(test_process_pid) is True
# again to test if os.kill() just tests the process, not actually kills it
assert process_with_pid_is_running(test_process_pid) is True
test_process.terminate()
test_process.kill()
# FIXME for whatever reason Python still "sees" this PID after killing it; maybe it's a thread PID and not a
# process one?
# assert process_with_pid_is_running(test_process_pid) is False
|
Comment out erroneous process_with_pid_is_running() test
|
Comment out erroneous process_with_pid_is_running() test
|
Python
|
agpl-3.0
|
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
|
---
+++
@@ -15,6 +15,9 @@
# again to test if os.kill() just tests the process, not actually kills it
assert process_with_pid_is_running(test_process_pid) is True
+ test_process.terminate()
test_process.kill()
- assert process_with_pid_is_running(test_process_pid) is False
+ # FIXME for whatever reason Python still "sees" this PID after killing it; maybe it's a thread PID and not a
+ # process one?
+ # assert process_with_pid_is_running(test_process_pid) is False
|
c88310e970518c3531dbe26c9544ff4455068a7e
|
opps/core/tests/test_obj_tags.py
|
opps/core/tests/test_obj_tags.py
|
from django.test import TestCase
from opps.channels.templatetags.menu_tags import ofKey
class OfKeyTest(TestCase):
def test_tag(self):
result = ofKey({"name": "andrews"}, "name")
self.assertEqual(result, "andrews")
def test_tag_is_none(self):
result = ofKey(None, "name")
self.assertEqual(result, "")
|
from django.test import TestCase
from opps.core.templatetags.obj_tags import ofKey
class OfKeyTest(TestCase):
def test_tag(self):
result = ofKey({"name": "andrews"}, "name")
self.assertEqual(result, "andrews")
def test_tag_is_none(self):
result = ofKey(None, "name")
self.assertEqual(result, "")
|
Fix import template tag obj_tags on test case core
|
Fix import template tag obj_tags on test case core
|
Python
|
mit
|
williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps,YACOWS/opps,jeanmask/opps,opps/opps,opps/opps,jeanmask/opps
|
---
+++
@@ -1,6 +1,6 @@
from django.test import TestCase
-from opps.channels.templatetags.menu_tags import ofKey
+from opps.core.templatetags.obj_tags import ofKey
class OfKeyTest(TestCase):
|
ee6bd389e3e602b67fac399cdb4a50c3a67666b9
|
twitter/admin.py
|
twitter/admin.py
|
from django.contrib import admin
from twitter.models import User, Tweet, Analytics, AnalyticsReport
class UserAdmin(admin.ModelAdmin):
list_display = ('screen_name', 'current_followers')
class AnalyticsAdmin(admin.ModelAdmin):
list_display = (
'date',
'user',
'followers',
'following',
'listed',
'tweet_count',
'retweet_count',
'reply_count',
'user_mention_count',
'link_count',
'hashtag_count',
)
class AnalyticsReportAdmin(admin.ModelAdmin):
list_display = (
'date',
'user',
'tweets_reweeted_count',
'tweets_favorited_count',
)
admin.site.register(User, UserAdmin)
admin.site.register(Tweet)
admin.site.register(Analytics, AnalyticsAdmin)
admin.site.register(AnalyticsReport, AnalyticsReportAdmin)
|
from django.contrib import admin
from twitter.models import User, Tweet, Analytics, AnalyticsReport
class UserAdmin(admin.ModelAdmin):
list_display = ('screen_name', 'current_followers')
class AnalyticsAdmin(admin.ModelAdmin):
list_display = (
'date',
'user',
'followers',
'following',
'listed',
'tweet_count',
'retweet_count',
'reply_count',
'user_mention_count',
'link_count',
'hashtag_count',
)
list_filter = ('user',)
class AnalyticsReportAdmin(admin.ModelAdmin):
list_display = (
'date',
'user',
'tweets_reweeted_count',
'tweets_favorited_count',
)
admin.site.register(User, UserAdmin)
admin.site.register(Tweet)
admin.site.register(Analytics, AnalyticsAdmin)
admin.site.register(AnalyticsReport, AnalyticsReportAdmin)
|
Add a list filter to make looking a specific users easier.
|
Add a list filter to make looking a specific users easier.
|
Python
|
mit
|
CIGIHub/tweet_cache,albertoconnor/tweet_cache
|
---
+++
@@ -20,6 +20,7 @@
'link_count',
'hashtag_count',
)
+ list_filter = ('user',)
class AnalyticsReportAdmin(admin.ModelAdmin):
|
0e866db1377e4c58ef05d66583cea6e35071ba20
|
nnpy/errors.py
|
nnpy/errors.py
|
from _nnpy import ffi, lib as nanomsg
class NNError(Exception):
def __init__(self, error_no, *args, **kwargs):
super().__init__(*args, **kwargs)
self.error_no = error_no
def convert(rc, value=None):
if rc < 0:
error_no = nanomsg.nn_errno()
chars = nanomsg.nn_strerror(error_no)
msg = ffi.string(chars).decode()
raise NNError(error_no, msg)
if callable(value):
return value()
return value
|
from _nnpy import ffi, lib as nanomsg
class NNError(Exception):
def __init__(self, error_no, *args, **kwargs):
super(NNError, self).__init__(*args, **kwargs)
self.error_no = error_no
def convert(rc, value=None):
if rc < 0:
error_no = nanomsg.nn_errno()
chars = nanomsg.nn_strerror(error_no)
msg = ffi.string(chars).decode()
raise NNError(error_no, msg)
if callable(value):
return value()
return value
|
Fix incorrect args to super
|
Fix incorrect args to super
|
Python
|
mit
|
nanomsg/nnpy
|
---
+++
@@ -2,7 +2,7 @@
class NNError(Exception):
def __init__(self, error_no, *args, **kwargs):
- super().__init__(*args, **kwargs)
+ super(NNError, self).__init__(*args, **kwargs)
self.error_no = error_no
def convert(rc, value=None):
|
6557cbe8bee7ded848ba7c3928e2b4f82aedeea8
|
linked-list/remove-k-from-list.py
|
linked-list/remove-k-from-list.py
|
# Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k
class Node(object):
def __init__(self, value):
self.value = value
self.next = None
def remove_k_from_list(l, k):
fake_head = Node(None)
fake_head.next = l
current_node = fake_head
while current_node:
while current_node.next and current_node.next.value == k:
current_node.next = current_node.next.next
current_node = current_node.next
return fake_head.next
|
# Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
class LinkedList(object):
def __init__(self, head=None):
self.head = head
def add(self, new_node):
current_node = self.head
if self.head:
while current_node.next:
current_node = current_node.next
current_node.next = new_node # add to end of linked list
else:
self.head = new_node
def remove_k_from_list(l, k):
fake_head = Node(None)
fake_head.next = l
current_node = fake_head
while current_node:
while current_node.next and current_node.next.value == k:
current_node.next = current_node.next.next
current_node = current_node.next
return fake_head.next
|
Add linked list class and add method
|
Add linked list class and add method
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
---
+++
@@ -1,9 +1,22 @@
# Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k
-class Node(object):
+class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
+
+class LinkedList(object):
+ def __init__(self, head=None):
+ self.head = head
+
+ def add(self, new_node):
+ current_node = self.head
+ if self.head:
+ while current_node.next:
+ current_node = current_node.next
+ current_node.next = new_node # add to end of linked list
+ else:
+ self.head = new_node
def remove_k_from_list(l, k):
fake_head = Node(None)
|
06c7e43f96f9394949b0ec1ed709429ab3167cf9
|
incuna_auth/backends.py
|
incuna_auth/backends.py
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class CustomUserModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
"""Allow users to log in with their email as well as username."""
kw = 'email__iexact' if '@' in username else 'username'
kwargs = {kw: username}
try:
user = User.objects.get(**kwargs)
except User.DoesNotExist:
pass
else:
if user.check_password(password):
return user
|
from django.contrib.auth.backends import ModelBackend
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except ImportError:
# Django < 1.5
from django.contrib.auth.models import User
class CustomUserModelBackend(ModelBackend):
def authenticate(self, username=None, password=None):
"""Allow users to log in with their email as well as username."""
kw = 'email__iexact' if '@' in username else 'username'
kwargs = {kw: username}
try:
user = User.objects.get(**kwargs)
except User.DoesNotExist:
pass
else:
if user.check_password(password):
return user
|
Make the CustomerUserModelBackend Dj1.5 compatible
|
Make the CustomerUserModelBackend Dj1.5 compatible
|
Python
|
bsd-2-clause
|
incuna/incuna-auth,ghickman/incuna-auth,ghickman/incuna-auth,incuna/incuna-auth
|
---
+++
@@ -1,5 +1,10 @@
from django.contrib.auth.backends import ModelBackend
-from django.contrib.auth.models import User
+try:
+ from django.contrib.auth import get_user_model
+ User = get_user_model()
+except ImportError:
+ # Django < 1.5
+ from django.contrib.auth.models import User
class CustomUserModelBackend(ModelBackend):
|
6a717adf087f847ae4a58375170d01adf5ef17de
|
polyaxon/factories/pipelines.py
|
polyaxon/factories/pipelines.py
|
import factory
from factories.factory_projects import ProjectFactory
from factories.factory_users import UserFactory
from pipelines.models import Pipeline, Operation
class PipelineFactory(factory.DjangoModelFactory):
user = factory.SubFactory(UserFactory)
project = factory.SubFactory(ProjectFactory)
class Meta:
model = Pipeline
class OperationFactory(factory.DjangoModelFactory):
pipeline = factory.SubFactory(PipelineFactory)
class Meta:
model = Operation
|
import factory
from factories.factory_projects import ProjectFactory
from factories.factory_users import UserFactory
from pipelines.models import Pipeline, Operation, PipelineRun, OperationRun
class PipelineFactory(factory.DjangoModelFactory):
user = factory.SubFactory(UserFactory)
project = factory.SubFactory(ProjectFactory)
class Meta:
model = Pipeline
class OperationFactory(factory.DjangoModelFactory):
pipeline = factory.SubFactory(PipelineFactory)
class Meta:
model = Operation
class PipelineRunFactory(factory.DjangoModelFactory):
pipeline = factory.SubFactory(PipelineFactory)
class Meta:
model = PipelineRun
class OperationRunFactory(factory.DjangoModelFactory):
operation = factory.SubFactory(OperationFactory)
pipeline_run = factory.SubFactory(PipelineRunFactory)
class Meta:
model = OperationRun
|
Add pipeline run and operation run factories
|
Add pipeline run and operation run factories
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
---
+++
@@ -2,7 +2,7 @@
from factories.factory_projects import ProjectFactory
from factories.factory_users import UserFactory
-from pipelines.models import Pipeline, Operation
+from pipelines.models import Pipeline, Operation, PipelineRun, OperationRun
class PipelineFactory(factory.DjangoModelFactory):
@@ -18,3 +18,18 @@
class Meta:
model = Operation
+
+
+class PipelineRunFactory(factory.DjangoModelFactory):
+ pipeline = factory.SubFactory(PipelineFactory)
+
+ class Meta:
+ model = PipelineRun
+
+
+class OperationRunFactory(factory.DjangoModelFactory):
+ operation = factory.SubFactory(OperationFactory)
+ pipeline_run = factory.SubFactory(PipelineRunFactory)
+
+ class Meta:
+ model = OperationRun
|
08ac56dc6d80560ec46ad06fec6843be18bb1d91
|
dictionary/test1.py
|
dictionary/test1.py
|
#!/usr/local/bin/python
#items=[('a','b'),(1,2)]
#b=dict(items)
#print b
#c=dict(name='c',age=42)
#print c
#print len(c)
#c['sex']='female'
#print c
#del c['age']
#print c
#print 'sex' in c
#c['age']=25
#print c
#print c.clear()
#print c
#x={'name':'a','age':'14'}
#y=x
#print y
#z=y.copy()
#z.clear()
#print x
#print y
#print z
x={'name':'a','age':14}
y=x.copy()
y['age']=25
print x
print y
a={}
b=a.fromkeys(['name','age'],'(hahaha)')
print b
print b.get('name')
print b.get('hi','N/A')
|
#!/usr/local/bin/python
#items=[('a','b'),(1,2)]
#b=dict(items)
#print b
#c=dict(name='c',age=42)
#print c
#print len(c)
#c['sex']='female'
#print c
#del c['age']
#print c
#print 'sex' in c
#c['age']=25
#print c
#print c.clear()
#print c
#x={'name':'a','age':'14'}
#y=x
#print y
#z=y.copy()
#z.clear()
#print x
#print y
#print z
#x={'name':'a','age':14}
#y=x.copy()
#y['age']=25
#print x
#print y
#a={}
#b=a.fromkeys(['name','age'],'(hahaha)')
#print b
#print b.get('name')
#print b.get('hi','N/A')
#c={}
#print c.has_key('name')
#c['name']='Eric'
#print c.has_key('name')
#x={'name':'a','age':'14'}
#print x.items()
#print x.pop('age')
#print x
#y={}
#print y.setdefault('name','N/A')
#print y
#y['name']='Apple'
#y.setdefault('name','N/A')
#print y
x={'a':'1','b':'2','c':'3'}
y={'c':'5'}
x.update(y)
print x
print x.values()
|
Use update,get,setdefault and so on.
|
Use update,get,setdefault and so on.
|
Python
|
apache-2.0
|
Vayne-Lover/Python
|
---
+++
@@ -22,13 +22,32 @@
#print x
#print y
#print z
-x={'name':'a','age':14}
-y=x.copy()
-y['age']=25
+#x={'name':'a','age':14}
+#y=x.copy()
+#y['age']=25
+#print x
+#print y
+#a={}
+#b=a.fromkeys(['name','age'],'(hahaha)')
+#print b
+#print b.get('name')
+#print b.get('hi','N/A')
+#c={}
+#print c.has_key('name')
+#c['name']='Eric'
+#print c.has_key('name')
+#x={'name':'a','age':'14'}
+#print x.items()
+#print x.pop('age')
+#print x
+#y={}
+#print y.setdefault('name','N/A')
+#print y
+#y['name']='Apple'
+#y.setdefault('name','N/A')
+#print y
+x={'a':'1','b':'2','c':'3'}
+y={'c':'5'}
+x.update(y)
print x
-print y
-a={}
-b=a.fromkeys(['name','age'],'(hahaha)')
-print b
-print b.get('name')
-print b.get('hi','N/A')
+print x.values()
|
a5d2751be278356e2a03fe07f5a1d0aef11b401f
|
ch07/enrich_airlines.py
|
ch07/enrich_airlines.py
|
# Load the on-time parquet file
on_time_dataframe = sqlContext.read.parquet('../data/on_time_performance.parquet')
wikidata = sqlContext.read.json('../data/wikidata-20160404-all.json.bz2')
|
# Load the on-time parquet file
on_time_dataframe = sqlContext.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_codes = sqlContext.sql(
"SELECT DISTINCT Carrier FROM on_time_performance"
)
carrier_codes.collect()
airlines = sqlContext.read.format('com.databricks.spark.csv')\
.options(header='false', nullValue='\N')\
.load('data/airlines.csv')
airlines.show()
# Is Delta around?
airlines.filter(airlines.C3 == 'DL').show()
# Drop fields except for C1 as name, C3 as carrier code
airlines.registerTempTable("airlines")
airlines = sqlContext.sql("SELECT C1 AS Name, C3 AS CarrierCode from airlines")
# Join our 14 carrier codes to the airliens table to get our set of airlines
our_airlines = carrier_codes.join(airlines, carrier_codes.Carrier == airlines.CarrierCode)
our_airlines = our_airlines.select('Name', 'CarrierCode')
our_airlines.show()
# Store as JSON objects via a dataframe. Repartition to 1 to get 1 json file.
our_airlines.repartition(1).write.json("data/our_airlines.json")
#wikidata = sqlContext.read.json('../data/wikidata-20160404-all.json.bz2')
|
Work on chapter 7 enriching airlines with the name of the carrier from the openflights airline data
|
Work on chapter 7 enriching airlines with the name of the carrier from the openflights airline data
|
Python
|
mit
|
rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,naoyak/Agile_Data_Code_2
|
---
+++
@@ -1,4 +1,31 @@
# Load the on-time parquet file
-on_time_dataframe = sqlContext.read.parquet('../data/on_time_performance.parquet')
+on_time_dataframe = sqlContext.read.parquet('data/on_time_performance.parquet')
-wikidata = sqlContext.read.json('../data/wikidata-20160404-all.json.bz2')
+# The first step is easily expressed as SQL: get all unique tail numbers for each airline
+on_time_dataframe.registerTempTable("on_time_performance")
+carrier_codes = sqlContext.sql(
+ "SELECT DISTINCT Carrier FROM on_time_performance"
+ )
+carrier_codes.collect()
+
+airlines = sqlContext.read.format('com.databricks.spark.csv')\
+ .options(header='false', nullValue='\N')\
+ .load('data/airlines.csv')
+airlines.show()
+
+# Is Delta around?
+airlines.filter(airlines.C3 == 'DL').show()
+
+# Drop fields except for C1 as name, C3 as carrier code
+airlines.registerTempTable("airlines")
+airlines = sqlContext.sql("SELECT C1 AS Name, C3 AS CarrierCode from airlines")
+
+# Join our 14 carrier codes to the airliens table to get our set of airlines
+our_airlines = carrier_codes.join(airlines, carrier_codes.Carrier == airlines.CarrierCode)
+our_airlines = our_airlines.select('Name', 'CarrierCode')
+our_airlines.show()
+
+# Store as JSON objects via a dataframe. Repartition to 1 to get 1 json file.
+our_airlines.repartition(1).write.json("data/our_airlines.json")
+
+#wikidata = sqlContext.read.json('../data/wikidata-20160404-all.json.bz2')
|
84ccc5489b4d3dfdf1883bb777cd597bd9cb8e53
|
src/test/testclasses.py
|
src/test/testclasses.py
|
from nose.tools import *
from libeeyore.builtins import add_builtins
from libeeyore.classvalues import *
from libeeyore.environment import EeyEnvironment
from libeeyore.cpp.cppvalues import *
from libeeyore.cpp.cpprenderer import EeyCppRenderer
from eeyasserts import assert_multiline_equal
def test_Static_variable_can_be_read():
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyInit( EeyType( EeyInt ), EeySymbol( "i" ), EeyInt( "7" ) ),
)
)
assert_equal( decl.render( env ), "" )
value = EeySymbol( "MyClass.i" )
assert_equal( value.render( env ), "7" )
|
from nose.tools import *
from libeeyore.builtins import add_builtins
from libeeyore.classvalues import *
from libeeyore.environment import EeyEnvironment
from libeeyore.cpp.cppvalues import *
from libeeyore.cpp.cpprenderer import EeyCppRenderer
from eeyasserts import assert_multiline_equal
def test_Static_variable_can_be_read():
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyInit( EeyType( EeyInt ), EeySymbol( "i" ), EeyInt( "7" ) ),
)
)
assert_equal( decl.render( env ), "" )
value = EeySymbol( "MyClass.i" )
assert_equal( value.render( env ), "7" )
def test_Member_function_can_be_executed():
"""
Note this test may turn out to be incorrect. Python would respond with:
TypeError: unbound method myfunc() must be called with X instance as
first argument (got int instance instead)
"""
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyDef(
EeyType( EeyInt ),
EeySymbol( "myfunc" ),
(
( EeyType( EeyInt ), EeySymbol( "x" ) ),
),
(
EeyReturn( EeySymbol( "x" ) ),
)
),
)
)
assert_equal( decl.render( env ), "" )
value3 = EeyFunctionCall(
EeySymbol( "MyClass.myfunc" ),
(
EeyInt( "3" ),
)
)
value5 = EeyFunctionCall(
EeySymbol( "MyClass.myfunc" ),
(
EeyInt( "5" ),
)
)
assert_equal( value5.render( env ), "5" )
|
Add a test that demonstrates calling a function within a class definition.
|
Add a test that demonstrates calling a function within a class definition.
|
Python
|
mit
|
andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper
|
---
+++
@@ -26,3 +26,50 @@
assert_equal( value.render( env ), "7" )
+
+def test_Member_function_can_be_executed():
+ """
+ Note this test may turn out to be incorrect. Python would respond with:
+ TypeError: unbound method myfunc() must be called with X instance as
+ first argument (got int instance instead)
+ """
+
+ env = EeyEnvironment( EeyCppRenderer() )
+
+ decl = EeyClass(
+ name=EeySymbol( "MyClass" ),
+ base_classes=(),
+ body_stmts=(
+ EeyDef(
+ EeyType( EeyInt ),
+ EeySymbol( "myfunc" ),
+ (
+ ( EeyType( EeyInt ), EeySymbol( "x" ) ),
+ ),
+ (
+ EeyReturn( EeySymbol( "x" ) ),
+ )
+ ),
+ )
+ )
+
+ assert_equal( decl.render( env ), "" )
+
+ value3 = EeyFunctionCall(
+ EeySymbol( "MyClass.myfunc" ),
+ (
+ EeyInt( "3" ),
+ )
+ )
+
+ value5 = EeyFunctionCall(
+ EeySymbol( "MyClass.myfunc" ),
+ (
+ EeyInt( "5" ),
+ )
+ )
+
+ assert_equal( value5.render( env ), "5" )
+
+
+
|
898e087d67ba5f6f8af3f280d46c59edc0bb665e
|
modules/module_spotify.py
|
modules/module_spotify.py
|
import re
import urllib
def handle_url(bot, user, channel, url, msg):
"""Handle IMDB urls"""
m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
if not m: return
dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
f = urllib.urlopen(dataurl)
songinfo = f.read()
f.close()
artist, album, song = songinfo.split("/", 2)
bot.say(channel, "[Spotify] Artist: %s - Album: %s - Song: %s" % (artist.strip(), album.strip(), song.strip()))
|
import re
import urllib
def handle_url(bot, user, channel, url, msg):
"""Handle IMDB urls"""
m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
if not m: return
dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
f = urllib.urlopen(dataurl)
songinfo = f.read()
f.close()
artist, album, song = songinfo.split("/", 2)
bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
|
Change output format to a more reasonable one
|
Change output format to a more reasonable one
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@143 dda364a1-ef19-0410-af65-756c83048fb2
|
Python
|
bsd-3-clause
|
EArmour/pyfibot,nigeljonez/newpyfibot,huqa/pyfibot,lepinkainen/pyfibot,rnyberg/pyfibot,lepinkainen/pyfibot,aapa/pyfibot,aapa/pyfibot,EArmour/pyfibot,rnyberg/pyfibot,huqa/pyfibot
|
---
+++
@@ -16,4 +16,4 @@
artist, album, song = songinfo.split("/", 2)
- bot.say(channel, "[Spotify] Artist: %s - Album: %s - Song: %s" % (artist.strip(), album.strip(), song.strip()))
+ bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
|
afe792e50e6e30036f1ed718d7c3f5143a1e2da5
|
adhocracy4/follows/signals.py
|
adhocracy4/follows/signals.py
|
from django.conf import settings
from django.db.models.signals import post_save
from . import models
def autofollow_hook(instance, **kwargs):
if hasattr(instance.project, 'id'):
models.Follow.objects.get_or_create(
project=instance.project,
creator=instance.creator,
defaults={
'enabled': True,
})
for model in settings.A4_AUTO_FOLLOWABLES:
post_save.connect(autofollow_hook, model)
|
from django.apps import apps
from django.conf import settings
from django.db.models.signals import post_save
from . import models
def autofollow_hook(instance, **kwargs):
if hasattr(instance.project, 'id'):
models.Follow.objects.get_or_create(
project=instance.project,
creator=instance.creator,
defaults={
'enabled': True,
})
for app, model in settings.A4_AUTO_FOLLOWABLES:
post_save.connect(autofollow_hook, apps.get_model(app, model))
|
Fix setting up AUTO_FOLLOWABLES models
|
Fix setting up AUTO_FOLLOWABLES models
Note that `Signal.connect` expects the model class as the sender
argument.
Altough while using e.g. `post_save` it also works with a string
`"apname.model"`
|
Python
|
agpl-3.0
|
liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4
|
---
+++
@@ -1,3 +1,4 @@
+from django.apps import apps
from django.conf import settings
from django.db.models.signals import post_save
@@ -14,5 +15,5 @@
})
-for model in settings.A4_AUTO_FOLLOWABLES:
- post_save.connect(autofollow_hook, model)
+for app, model in settings.A4_AUTO_FOLLOWABLES:
+ post_save.connect(autofollow_hook, apps.get_model(app, model))
|
404ff05ea8a57fff8e706a26b69356366e67352f
|
main.py
|
main.py
|
from update import *
from plot_and_save import *
from Parameters.parametersA3 import n, n_t, n0
animate = False # animate the wave in the string upon completion
plot = False # plot the waveform and frequency spectrum on the piano bridge
write_file = True # write the waveform on the bridge to a .wav file
filename = "pianoA3.wav"
# calculate matrices
A, B = calculate_AB()
C = calculate_C()
# initialize eta and u
eta = update_eta(init=True)
u = u_old = 1.*np.zeros(n)
ims = []
u_bridge = np.array([])
for i in range(n_t):
u_bridge = np.append(u_bridge, u[-1])
if i%10==0 and animate:
ims.append([u])
if eta[1] >= 0.: # eta[0]:
force = calculate_force(eta[1], u[n0])
u, u_old = update_displacement(u, u_old, A, B, C, force)
eta = update_eta(eta, force[n0])
else:
u, u_old = update_displacement(u, u_old, A, B)
# animate, plot and save
if animate: animate_string(ims)
if plot:
plot_u_bridge(u_bridge)
plot_frequency(u_bridge)
if write_file: save_to_wav(u_bridge, filename)
|
from update import *
from plot_and_save import *
from Parameters.parametersA3 import n, n_t, n0
animate = False # animate the wave in the string upon completion
plot = False # plot the waveform and frequency spectrum on the piano bridge
write_file = True # write the waveform on the bridge to a .wav file
filename = "./Notes/pianoA3.wav"
# calculate matrices
A, B = calculate_AB()
C = calculate_C()
# initialize eta and u
eta = update_eta(init=True)
u = u_old = 1.*np.zeros(n)
ims = []
u_bridge = np.array([])
for i in range(n_t):
u_bridge = np.append(u_bridge, u[-1])
if i%10==0 and animate:
ims.append([u])
if eta[1] >= 0.: # eta[0]:
force = calculate_force(eta[1], u[n0])
u, u_old = update_displacement(u, u_old, A, B, C, force)
eta = update_eta(eta, force[n0])
else:
u, u_old = update_displacement(u, u_old, A, B)
# animate, plot and save
if animate: animate_string(ims)
if plot:
plot_u_bridge(u_bridge)
plot_frequency(u_bridge)
if write_file: save_to_wav(u_bridge, filename)
|
Save file in correct folder
|
Save file in correct folder
|
Python
|
mit
|
madarivi/PianoSimulation
|
---
+++
@@ -4,7 +4,7 @@
animate = False # animate the wave in the string upon completion
plot = False # plot the waveform and frequency spectrum on the piano bridge
write_file = True # write the waveform on the bridge to a .wav file
-filename = "pianoA3.wav"
+filename = "./Notes/pianoA3.wav"
# calculate matrices
A, B = calculate_AB()
|
817c9634e89be79c5ca4e08ce48c1eb1dd173f46
|
skimage/viewer/qt/__init__.py
|
skimage/viewer/qt/__init__.py
|
import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PySide
qt_api = 'pyside'
except ImportError:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
os.environ['QT_API'] = qt_api
|
import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PySide
qt_api = 'pyside'
except ImportError:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
|
Fix setting of QT_API environment variable
|
Fix setting of QT_API environment variable
|
Python
|
bsd-3-clause
|
jwiggins/scikit-image,blink1073/scikit-image,Hiyorimi/scikit-image,pratapvardhan/scikit-image,paalge/scikit-image,rjeli/scikit-image,emon10005/scikit-image,robintw/scikit-image,newville/scikit-image,almarklein/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,GaZ3ll3/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,Britefury/scikit-image,bennlich/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,juliusbierk/scikit-image,michaelpacer/scikit-image,almarklein/scikit-image,bennlich/scikit-image,pratapvardhan/scikit-image,blink1073/scikit-image,emon10005/scikit-image,michaelpacer/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image,robintw/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,juliusbierk/scikit-image,SamHames/scikit-image,ajaybhat/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,Midafi/scikit-image,warmspringwinds/scikit-image,GaZ3ll3/scikit-image,chintak/scikit-image,Hiyorimi/scikit-image,chintak/scikit-image,chintak/scikit-image,bsipocz/scikit-image,youprofit/scikit-image,Britefury/scikit-image,newville/scikit-image,michaelaye/scikit-image,SamHames/scikit-image,almarklein/scikit-image,bsipocz/scikit-image,oew1v07/scikit-image,chriscrosscutler/scikit-image,Midafi/scikit-image,SamHames/scikit-image,vighneshbirodkar/scikit-image,youprofit/scikit-image,WarrenWeckesser/scikits-image,rjeli/scikit-image,ofgulban/scikit-image,jwiggins/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,keflavich/scikit-image
|
---
+++
@@ -16,4 +16,7 @@
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
+
+
+if qt_api is not None:
os.environ['QT_API'] = qt_api
|
1525c25029a2cd93494ebab45377661d606fa7ab
|
make_mozilla/events/tasks.py
|
make_mozilla/events/tasks.py
|
from celery.decorators import task
from make_mozilla.bsd import BSDRegisterConstituent
@task
def register_email_address_as_constituent(email_address, group):
BSDRegisterConstituent.add_email_to_group(email_address, group)
|
from celery.task import task
from make_mozilla.bsd import BSDRegisterConstituent
@task
def register_email_address_as_constituent(email_address, group):
BSDRegisterConstituent.add_email_to_group(email_address, group)
|
Switch to post Celery 2.2 task decorator syntax.
|
Switch to post Celery 2.2 task decorator syntax.
|
Python
|
bsd-3-clause
|
mozilla/make.mozilla.org,mozilla/make.mozilla.org,mozilla/make.mozilla.org,mozilla/make.mozilla.org
|
---
+++
@@ -1,4 +1,4 @@
-from celery.decorators import task
+from celery.task import task
from make_mozilla.bsd import BSDRegisterConstituent
@task
|
151dbe6d319b27882e4df42a73c4fe6c6b77b90a
|
rm/trials/templatetags/share.py
|
rm/trials/templatetags/share.py
|
"""
Helpers for sharing
"""
from django.template import Library
register = Library()
def absolute(request):
return request.build_absolute_uri(request.path)
@register.inclusion_tag('share_this.html', takes_context=True)
def share_this(context):
"What, you can't copy a URL? Bah."
return dict(
title=context['trial'].title,
href=absolute(context['request']),
img=context['request'].build_absolute_uri('/static/img/randomisemelogo.png')
)
register.filter('absolute', absolute)
|
"""
Helpers for sharing
"""
from django.template import Library
register = Library()
def absolute(request):
return request.build_absolute_uri(request.path)
@register.inclusion_tag('share_this.html', takes_context=True)
def share_this(context):
"What, you can't copy a URL? Bah."
title = ''
trial = context.get('trial')
if trial:
title = trial.title
return dict(
title=title,
href=absolute(context['request']),
img=context['request'].build_absolute_uri('/static/img/randomisemelogo.png')
)
register.filter('absolute', absolute)
|
Move variable out of inline dict construction to debug production errors.
|
Move variable out of inline dict construction to debug production errors.
|
Python
|
agpl-3.0
|
openhealthcare/randomise.me,openhealthcare/randomise.me,openhealthcare/randomise.me,openhealthcare/randomise.me
|
---
+++
@@ -11,8 +11,12 @@
@register.inclusion_tag('share_this.html', takes_context=True)
def share_this(context):
"What, you can't copy a URL? Bah."
+ title = ''
+ trial = context.get('trial')
+ if trial:
+ title = trial.title
return dict(
- title=context['trial'].title,
+ title=title,
href=absolute(context['request']),
img=context['request'].build_absolute_uri('/static/img/randomisemelogo.png')
)
|
0b6e9d6e329b8e88fca9635640ef0842f3cb82c2
|
api/tests/test_scrapers.py
|
api/tests/test_scrapers.py
|
def test_scrape_item_by_id():
from api.scrapers.item import scrape_item_by_id
item = scrape_item_by_id('d19447e548d')
assert item.id == 'd19447e548d'
assert item.name == 'Thyrus Zenith'
assert item.type == 'Two-handed Conjurer\'s Arm'
assert item.ilvl == 90
|
def test_scrape_item_by_id():
    """Scraping a known item id yields the expected item attributes."""
    from api.scrapers.item import scrape_item_by_id

    scraped = scrape_item_by_id('d19447e548d')

    expected = {
        'id': 'd19447e548d',
        'name': 'Thyrus Zenith',
        'type': 'Two-handed Conjurer\'s Arm',
        'ilvl': 90,
    }
    for attr, value in expected.items():
        assert getattr(scraped, attr) == value


def test_scrape_character_by_id():
    """Scraping a known character id yields the expected character attributes."""
    from api.scrapers.character import scrape_character_by_id

    scraped = scrape_character_by_id('8774791')

    expected = {
        'id': '8774791',
        'name': 'Mina Loriel',
        'species': 'Miqo\'te',
        'gender': 'Female',
    }
    for attr, value in expected.items():
        assert getattr(scraped, attr) == value
Add scrape character unit test
|
Add scrape character unit test
|
Python
|
mit
|
Demotivated/loadstone
|
---
+++
@@ -7,3 +7,14 @@
assert item.name == 'Thyrus Zenith'
assert item.type == 'Two-handed Conjurer\'s Arm'
assert item.ilvl == 90
+
+
+def test_scrape_character_by_id():
+ from api.scrapers.character import scrape_character_by_id
+
+ name = scrape_character_by_id('8774791')
+
+ assert name.id == '8774791'
+ assert name.name == 'Mina Loriel'
+ assert name.species == 'Miqo\'te'
+ assert name.gender == 'Female'
|
68ae2de4b51a2fe0f02c40bad8731d34b1092521
|
narcissa.py
|
narcissa.py
|
#!/usr/bin/env python3
import subprocess
import atexit
import sys
from utils.safe_schedule import SafeScheduler
from time import sleep
from glob import glob
META_IMPORT = '# narcissa import '
scheduler = SafeScheduler()
def make_exit_graceful():
original_hook = sys.excepthook
def new_hook(type, value, traceback):
if type == KeyboardInterrupt:
sys.exit("\nBye for now!")
else:
original_hook(type, value, traceback)
sys.excepthook = new_hook
def start_server():
cmd = 'waitress-serve --port=5000 server:app'
p = subprocess.Popen(cmd.split(), cwd='server')
return p
def start_scrapers():
for scraper_path in glob('scrapers/*.py'):
with open(scraper_path) as f:
print(scraper_path)
scraper_data = f.read()
exec(scraper_data)
def main():
make_exit_graceful()
server = start_server()
atexit.register(server.terminate)
start_scrapers()
while True:
scheduler.run_pending()
sleep(1)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
import subprocess
import atexit
import sys
from utils.safe_schedule import SafeScheduler
from time import sleep
from glob import glob
META_IMPORT = '# narcissa import '
scheduler = SafeScheduler()
def make_exit_graceful():
    """Install a sys.excepthook so Ctrl-C exits with a friendly message."""
    original_hook = sys.excepthook

    def new_hook(type, value, traceback):
        # Only KeyboardInterrupt gets the friendly exit; every other
        # exception falls through to the previously installed hook.
        if type == KeyboardInterrupt:
            sys.exit("\nBye for now!")
        else:
            original_hook(type, value, traceback)

    sys.excepthook = new_hook


def start_server():
    """Launch the waitress web server as a child process and return it."""
    cmd = 'waitress-serve --port=5000 server:app'
    return subprocess.Popen(cmd.split(), cwd='server')


def load_scrapers():
    """Execute every scraper script so it can register its scheduled jobs."""
    for scraper_path in glob('scrapers/*.py'):
        with open(scraper_path) as f:
            print(scraper_path)
            exec(f.read())


def main():
    make_exit_graceful()
    server = start_server()
    # Ensure the web server dies together with this process.
    atexit.register(server.terminate)
    load_scrapers()
    while True:
        scheduler.run_pending()
        sleep(1)


if __name__ == '__main__':
    main()
|
Change name of start_scrapers() to be more accurate
|
Change name of start_scrapers() to be more accurate
|
Python
|
mit
|
mplewis/narcissa
|
---
+++
@@ -30,7 +30,7 @@
return p
-def start_scrapers():
+def load_scrapers():
for scraper_path in glob('scrapers/*.py'):
with open(scraper_path) as f:
print(scraper_path)
@@ -42,7 +42,7 @@
make_exit_graceful()
server = start_server()
atexit.register(server.terminate)
- start_scrapers()
+ load_scrapers()
while True:
scheduler.run_pending()
sleep(1)
|
27723696885319aabea974f83189d3a43770b7d5
|
spillway/fields.py
|
spillway/fields.py
|
"""Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import WritableField
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
|
"""Serializer fields"""
from django.contrib.gis import forms
from rest_framework.fields import FileField, WritableField
from greenwich.raster import Raster
from spillway.compat import json
class GeometryField(WritableField):
type_name = 'GeometryField'
type_label = 'geometry'
form_field_class = forms.GeometryField
def to_native(self, value):
# Create a dict from the GEOSGeometry when the value is not previously
# serialized from the spatial db.
try:
return {'type': value.geom_type, 'coordinates': value.coords}
# Value is already serialized as geojson, kml, etc.
except AttributeError:
return value
def from_native(self, value):
# forms.GeometryField cannot handle geojson dicts.
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
class NDArrayField(FileField):
type_name = 'NDArrayField'
type_label = 'ndarray'
def to_native(self, value):
params = self.context.get('params', {})
geom = params.get('g')
with Raster(getattr(value, 'path', value)) as r:
arr = r.clip(geom).masked_array() if geom else r.array()
return arr.tolist()
|
Add numpy array serializer field
|
Add numpy array serializer field
|
Python
|
bsd-3-clause
|
bkg/django-spillway,barseghyanartur/django-spillway,kuzmich/django-spillway
|
---
+++
@@ -1,6 +1,7 @@
"""Serializer fields"""
from django.contrib.gis import forms
-from rest_framework.fields import WritableField
+from rest_framework.fields import FileField, WritableField
+from greenwich.raster import Raster
from spillway.compat import json
@@ -24,3 +25,15 @@
if isinstance(value, dict):
value = json.dumps(value)
return super(GeometryField, self).from_native(value)
+
+
+class NDArrayField(FileField):
+ type_name = 'NDArrayField'
+ type_label = 'ndarray'
+
+ def to_native(self, value):
+ params = self.context.get('params', {})
+ geom = params.get('g')
+ with Raster(getattr(value, 'path', value)) as r:
+ arr = r.clip(geom).masked_array() if geom else r.array()
+ return arr.tolist()
|
f3ef685d4bb900733741f53e1afcefd143b26289
|
npcs.py
|
npcs.py
|
import random
from characters import BaseCharacer
class Mentor(BaseCharacer):
MENTORS_COUNT = 4
def __init__(self, location, *groups):
super(Mentor, self).__init__(location, *groups)
def change_place(self, new_x, new_y):
self.rect.left = new_x
self.rect.top = new_y
def change_to_random_place(self, locations):
place = random.randint(0, self.MENTORS_COUNT-1)
mc = locations[place]
self.change_place(mc.px, mc.py)
def update(self, dt, game):
pass
|
import random
from characters import BaseCharacer
class Mentor(BaseCharacer):
MENTORS_COUNT = 4
def __init__(self, location, *groups):
super(Mentor, self).__init__(location, *groups)
def change_place(self, new_x, new_y):
self.rect.left = new_x
self.rect.top = new_y
def change_to_random_place(self, locations):
place = random.randint(0, self.MENTORS_COUNT-1)
mc = locations[place]
self.change_place(mc.px, mc.py)
def visited(self, player, locations):
print 'Visited'
self.change_to_random_place(locations)
def update(self, dt, game):
pass
|
Add visit method to Mentor
|
Add visit method to Mentor
|
Python
|
mit
|
arturbalabanov/hacksym
|
---
+++
@@ -19,5 +19,9 @@
self.change_place(mc.px, mc.py)
+ def visited(self, player, locations):
+ print 'Visited'
+ self.change_to_random_place(locations)
+
def update(self, dt, game):
pass
|
5933f9ef0ff7af0fd85a7dbe6578eefe9b8f7cdf
|
seqcluster/create_report.py
|
seqcluster/create_report.py
|
import os
import shutil
import logging
from bcbio import install
install._set_matplotlib_default_backend()
import matplotlib
matplotlib.use('Agg', force=True)
from libs.read import load_data
from libs.report import make_profile
from libs.utils import safe_dirs
from db import make_database
import templates
logger = logging.getLogger('report')
def report(args):
"""
Create report in html format
"""
logger.info("reading sequeces")
data = load_data(args.json)
out_dir = os.path.join(args.out, "html")
safe_dirs(out_dir)
logger.info("create profile")
data = make_profile(data, out_dir, args)
logger.info("create database")
make_database(data, "seqcluster.db", args.out)
path_template = os.path.normpath(os.path.dirname(os.path.realpath(templates.__file__)))
css_template = os.path.join(path_template, "info.css")
js_template = os.path.join(path_template, "jquery.tablesorter.min.js")
css = os.path.join(out_dir, "info.css")
js = os.path.join(out_dir, "jquery.tablesorter.min.js")
if not os.path.exists(css):
shutil.copy(css_template, css)
shutil.copy(js_template, js)
logger.info("Done")
|
import os
import shutil
import logging
from bcbio import install
install._set_matplotlib_default_backend()
import matplotlib
matplotlib.use('Agg', force=True)
from libs.read import load_data
from libs.report import make_profile
from libs.utils import safe_dirs
from db import make_database
import templates
logger = logging.getLogger('report')
def report(args):
"""
Create report in html format
"""
logger.info("reading sequeces")
data = load_data(args.json)
out_dir = os.path.join(args.out, "html")
safe_dirs(out_dir)
logger.info("create profile")
data = make_profile(data, out_dir, args)
logger.info("create database")
make_database(data, "seqcluster.db", args.out)
path_template = os.path.normpath(os.path.dirname(os.path.realpath(templates.__file__)))
css_template = os.path.join(path_template, "info.css")
js_template = os.path.join(path_template, "jquery.tablesorter.min.js")
css = os.path.join(out_dir, "info.css")
js = os.path.join(out_dir, "jquery.tablesorter.min.js")
if not os.path.exists(css):
shutil.copy(css_template, css)
shutil.copy(js_template, js)
logger.info("Done. Download https://github.com/lpantano/seqclusterViz/archive/master.zip to browse the output.")
|
Add message with link to seqclusterViz
|
Add message with link to seqclusterViz
|
Python
|
mit
|
lpantano/seqcluster,lpantano/seqcluster,lpantano/seqcluster,lpantano/seqcluster,lpantano/seqcluster
|
---
+++
@@ -38,4 +38,4 @@
if not os.path.exists(css):
shutil.copy(css_template, css)
shutil.copy(js_template, js)
- logger.info("Done")
+ logger.info("Done. Download https://github.com/lpantano/seqclusterViz/archive/master.zip to browse the output.")
|
c96f939c25663e5b123e2123d2d8ec69cf7c4cbe
|
fusesoc/provider/git.py
|
fusesoc/provider/git.py
|
# Copyright FuseSoC contributors
# Licensed under the 2-Clause BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-2-Clause
import logging
import os.path
import shutil
import subprocess
from fusesoc.provider.provider import Provider
from fusesoc.utils import Launcher
logger = logging.getLogger(__name__)
class Git(Provider):
@staticmethod
def init_library(library):
logger.info(f"Cloning library into {library.location}")
git_args = ["clone", library.sync_uri, library.location]
try:
Launcher("git", git_args).run()
except subprocess.CalledProcessError as e:
raise RuntimeError(str(e))
@staticmethod
def update_library(library):
git_args = ["-C", library.location, "pull"]
try:
Launcher("git", git_args).run()
except subprocess.CalledProcessError as e:
raise RuntimeError(str(e))
def _checkout(self, local_dir):
version = self.config.get("version", None)
# TODO : Sanitize URL
repo = self.config.get("repo")
logger.info("Checking out " + repo + " to " + local_dir)
args = ["clone", "-q", "--depth", "1", repo, local_dir]
Launcher("git", args).run()
if version:
args = ["-C", local_dir, "checkout", "-q", version]
Launcher("git", args).run()
|
# Copyright FuseSoC contributors
# Licensed under the 2-Clause BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-2-Clause
import logging
import os.path
import shutil
import subprocess
from fusesoc.provider.provider import Provider
from fusesoc.utils import Launcher
logger = logging.getLogger(__name__)
class Git(Provider):
    """Git-backed provider for FuseSoC libraries and cores."""

    @staticmethod
    def init_library(library):
        """Clone the library's upstream repository into its local location."""
        logger.info(f"Cloning library into {library.location}")
        try:
            Launcher("git", ["clone", library.sync_uri, library.location]).run()
        except subprocess.CalledProcessError as e:
            raise RuntimeError(str(e))

    @staticmethod
    def update_library(library):
        """Pull the latest upstream changes into an already-cloned library."""
        try:
            Launcher("git", ["-C", library.location, "pull"]).run()
        except subprocess.CalledProcessError as e:
            raise RuntimeError(str(e))

    def _checkout(self, local_dir):
        """Shallow-clone the configured repo and check out the pinned version."""
        version = self.config.get("version", None)
        # TODO : Sanitize URL
        repo = self.config.get("repo")
        logger.info("Checking out " + repo + " to " + local_dir)
        # --no-single-branch keeps all branches/tags reachable despite --depth 1.
        clone_args = ["clone", "-q", "--depth", "1", "--no-single-branch", repo, local_dir]
        Launcher("git", clone_args).run()
        if version:
            Launcher("git", ["-C", local_dir, "checkout", "-q", version]).run()
|
Make sure shallow clone pulls all tags
|
Make sure shallow clone pulls all tags
The --no-single-branch flag ensures that the shallow-clone depth constraint does not make older tags invisible.
|
Python
|
bsd-2-clause
|
olofk/fusesoc,olofk/fusesoc
|
---
+++
@@ -37,7 +37,7 @@
# TODO : Sanitize URL
repo = self.config.get("repo")
logger.info("Checking out " + repo + " to " + local_dir)
- args = ["clone", "-q", "--depth", "1", repo, local_dir]
+ args = ["clone", "-q", "--depth", "1", "--no-single-branch", repo, local_dir]
Launcher("git", args).run()
if version:
args = ["-C", local_dir, "checkout", "-q", version]
|
64219411d0bcbb7dafc754bef8538fc237584031
|
go/vumitools/tests/test_api_worker.py
|
go/vumitools/tests/test_api_worker.py
|
# -*- coding: utf-8 -*-
"""Tests for go.vumitools.api_worker."""
from twisted.internet.defer import inlineCallbacks
from vumi.application.tests.test_base import ApplicationTestCase
from go.vumitools.api_worker import VumiApiWorker
from go.vumitools.api import VumiApiCommand
class TestVumiApiWorker(ApplicationTestCase):
application_class = VumiApiWorker
@inlineCallbacks
def setUp(self):
super(TestVumiApiWorker, self).setUp()
config = {
'send_to': {
'default': {
'transport_name': 'test_transport',
},
},
}
self.api = yield self.get_application(config)
def publish_command(self, cmd):
return self.dispatch(cmd, rkey='vumi.api')
@inlineCallbacks
def test_send(self):
yield self.publish_command(VumiApiCommand.send('batch1', 'content',
'to_addr'))
[msg] = yield self.get_dispatched_messages()
self.assertEqual(msg['to_addr'], 'to_addr')
self.assertEqual(msg['content'], 'content')
|
# -*- coding: utf-8 -*-
"""Tests for go.vumitools.api_worker."""
from twisted.internet.defer import inlineCallbacks
from vumi.application.tests.test_base import ApplicationTestCase
from go.vumitools.api_worker import VumiApiWorker
from go.vumitools.api import VumiApiCommand
class TestVumiApiWorker(ApplicationTestCase):
    """Tests for the VumiApiWorker application."""

    application_class = VumiApiWorker

    @inlineCallbacks
    def setUp(self):
        super(TestVumiApiWorker, self).setUp()
        # The application test base class supplies send_to config itself.
        self.api = yield self.get_application({})

    def publish_command(self, cmd):
        # API commands travel on the vumi.api routing key.
        return self.dispatch(cmd, rkey='vumi.api')

    @inlineCallbacks
    def test_send(self):
        cmd = VumiApiCommand.send('batch1', 'content', 'to_addr')
        yield self.publish_command(cmd)
        [msg] = yield self.get_dispatched_messages()
        self.assertEqual(msg['to_addr'], 'to_addr')
        self.assertEqual(msg['content'], 'content')
|
Remove send_to config from tests since Vumi's application test class now adds this automatically.
|
Remove send_to config from tests since Vumi's application test class now adds this automatically.
|
Python
|
bsd-3-clause
|
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
|
---
+++
@@ -17,14 +17,7 @@
@inlineCallbacks
def setUp(self):
super(TestVumiApiWorker, self).setUp()
- config = {
- 'send_to': {
- 'default': {
- 'transport_name': 'test_transport',
- },
- },
- }
- self.api = yield self.get_application(config)
+ self.api = yield self.get_application({})
def publish_command(self, cmd):
return self.dispatch(cmd, rkey='vumi.api')
|
a38ee91cbb45cba35c930aae780a469c0cbc762c
|
mrbelvedereci/build/tasks.py
|
mrbelvedereci/build/tasks.py
|
from celery import shared_task
from mrbelvedereci.build.models import Build
from mrbelvedereci.salesforce.models import Org
@shared_task
def run_build(build_id):
build = Build.objects.get(id=build_id)
build.run()
return build.status
@shared_task
def check_queued_build(build_id):
build = Build.objects.get(id = build_id)
# Check for concurrency blocking
try:
org = Org.objects.get(name = build.trigger.org, repo = build.repo)
except Org.DoesNotExist:
return
# If this is not a scratch org, ensure no builds are currently running against the org
if not org.scratch:
running_builds = Build.objects.filter(status='running', repo=build.repo, org = build.org).count()
if running_builds:
# Requeue this job to check again in 5 seconds
check_queued_build.apply_async((build.id,), countdown=5)
return 'Queued: checking again in 5 seconds'
# Queue the background job with a 1 second delay to allow the transaction to commit
run_build.apply_async((build.id,), countdown=1)
|
from celery import shared_task
from mrbelvedereci.build.models import Build
from mrbelvedereci.salesforce.models import Org
@shared_task
def run_build(build_id):
    """Execute the build identified by ``build_id`` and return its status."""
    build = Build.objects.get(id=build_id)
    build.run()
    return build.status


@shared_task
def check_queued_build(build_id):
    """Start a queued build once its target org has no running builds."""
    build = Build.objects.get(id=build_id)

    # Concurrency gate: builds against a persistent org must not overlap.
    try:
        org = Org.objects.get(name=build.trigger.org, repo=build.repo)
    except Org.DoesNotExist:
        return

    if not org.scratch:
        busy = Build.objects.filter(
            status='running',
            repo=build.repo,
            trigger__org=build.trigger.org,
        ).count()
        if busy:
            # Re-check in 5 seconds rather than tying up a worker.
            check_queued_build.apply_async((build.id,), countdown=5)
            return 'Queued: checking again in 5 seconds'

    # The 1 second delay lets the enqueuing transaction commit first.
    run_build.apply_async((build.id,), countdown=1)
|
Fix path to org field
|
Fix path to org field
|
Python
|
bsd-3-clause
|
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
|
---
+++
@@ -20,7 +20,7 @@
# If this is not a scratch org, ensure no builds are currently running against the org
if not org.scratch:
- running_builds = Build.objects.filter(status='running', repo=build.repo, org = build.org).count()
+ running_builds = Build.objects.filter(status='running', repo=build.repo, trigger__org = build.trigger.org).count()
if running_builds:
# Requeue this job to check again in 5 seconds
check_queued_build.apply_async((build.id,), countdown=5)
|
8aae2526f4e565982f8c57c25d796c59d17e6c46
|
components/lie_graph/lie_graph/graph_model_classes/model_files.py
|
components/lie_graph/lie_graph/graph_model_classes/model_files.py
|
# -*- coding: utf-8 -*-
"""
file: model_files.py
Graph model classes for working with files
"""
import os
import logging
from lie_graph.graph_mixin import NodeEdgeToolsBaseClass
class FilePath(NodeEdgeToolsBaseClass):
@property
def exists(self):
path = self.get()
if path:
return os.path.exists(path)
return False
@property
def iswritable(self):
return os.access(self.get(), os.W_OK)
def set(self, key, value=None, absolute=True):
if key == self.node_value_tag and absolute:
value = os.path.abspath(value)
self.nodes[self.nid][key] = value
def create_dirs(self):
"""
Create directories of the stored path
:return: Absolute path to working directory
:rtype: :py:str
"""
path = self.get()
if self.exists and self.iswritable:
logging.info('Directory exists and writable: {0}'.format(path))
return path
try:
os.makedirs(path, 0755)
except Exception:
logging.error('Unable to create project directory: {0}'.format(path))
return path
|
# -*- coding: utf-8 -*-
"""
file: model_files.py
Graph model classes for working with files
"""
import os
import logging
from lie_graph.graph_mixin import NodeEdgeToolsBaseClass
class FilePath(NodeEdgeToolsBaseClass):
@property
def exists(self):
path = self.get()
if path:
return os.path.exists(path)
return False
@property
def iswritable(self):
return os.access(self.get(), os.W_OK)
def set(self, key, value=None, absolute=True):
if key == self.node_value_tag and absolute:
value = os.path.abspath(value)
self.nodes[self.nid][key] = value
def makedirs(self):
"""
Recursively create the directory structure of the path
:return: Absolute path to working directory
:rtype: :py:str
"""
path = self.get()
if self.exists and self.iswritable:
logging.info('Directory exists and writable: {0}'.format(path))
return path
try:
os.makedirs(path, 0755)
except Exception:
logging.error('Unable to create project directory: {0}'.format(path))
return path
|
Rename create_dirs method to makedirs in line with the os.makedirs function
|
Rename create_dirs method to makedirs in line with the os.makedirs function
|
Python
|
apache-2.0
|
MD-Studio/MDStudio,MD-Studio/MDStudio,MD-Studio/MDStudio,MD-Studio/MDStudio,MD-Studio/MDStudio
|
---
+++
@@ -34,9 +34,9 @@
self.nodes[self.nid][key] = value
- def create_dirs(self):
+ def makedirs(self):
"""
- Create directories of the stored path
+ Recursively create the directory structure of the path
:return: Absolute path to working directory
:rtype: :py:str
|
a2b6aa3bd90e967e2b7811f65a6b7311f13c53b3
|
testproject/settings.py
|
testproject/settings.py
|
import os
DEBUG = True
BASE_DIR = os.path.dirname(__file__)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
SECRET_KEY = '_'
MIDDLEWARE_CLASSES = ()
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'rest_framework',
'rest_framework.authtoken',
'djoser',
'testapp',
)
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
),
}
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
DJOSER = {
'DOMAIN': 'frontend.com',
'SITE_NAME': 'Frontend',
'PASSWORD_RESET_CONFIRM_URL': '#/password/reset/confirm/{uid}/{token}',
'ACTIVATION_URL': '#/activate/{uid}/{token}',
}
|
import os

# Development-only settings for the djoser example/test project.
DEBUG = True

BASE_DIR = os.path.dirname(__file__)

# Single local SQLite database stored alongside the project files.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Not a real secret: this project is never deployed.
SECRET_KEY = '_'

MIDDLEWARE_CLASSES = ()

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    # staticfiles serves the assets the browsable API needs.
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework.authtoken',
    'djoser',
    'testapp',
)

STATIC_URL = '/static/'

REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.TokenAuthentication',
    ),
}

ROOT_URLCONF = 'urls'

TEMPLATE_DIRS = (
    os.path.join(BASE_DIR, 'templates'),
)

DJOSER = {
    'DOMAIN': 'frontend.com',
    'SITE_NAME': 'Frontend',
    'PASSWORD_RESET_CONFIRM_URL': '#/password/reset/confirm/{uid}/{token}',
    'ACTIVATION_URL': '#/activate/{uid}/{token}',
}
|
Fix browsable API in test project - add staticfiles app.
|
Fix browsable API in test project - add staticfiles app.
|
Python
|
mit
|
johnwalker/djoser,akalipetis/djoser,fladi/djoser,yiyocx/djoser,barseghyanartur/djoser,sunscrapers/djoser,sunscrapers/djoser,dokenzy/djoser,carlosfunk/djoser,akalipetis/djoser,PingaxAnalytics/koob_auth,liyocee/djoser,sunscrapers/djoser,mjuopperi/djoser,vandoornik/djoser,unacast/djoser,apokinsocha/djoser,avances123/djoser
|
---
+++
@@ -18,6 +18,7 @@
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
+ 'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
@@ -25,6 +26,8 @@
'testapp',
)
+
+STATIC_URL = '/static/'
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
|
3e488750a460afa549795d7189f5c5b1c43f96e0
|
avalon/houdini/__init__.py
|
avalon/houdini/__init__.py
|
from .pipeline import (
install,
uninstall,
Creator,
ls,
containerise,
)
from .lib import (
lsattr,
lsattrs,
read,
maintained_selection,
unique_name
)
__all__ = [
"install",
"uninstall",
"Creator",
"ls",
"containerise",
# Utility functions
"maintained_selection",
"lsattr",
"lsattrs",
"read",
"unique_name"
]
|
from .pipeline import (
install,
uninstall,
Creator,
ls,
containerise,
)
from .lib import (
lsattr,
lsattrs,
read,
maintained_selection,
unique_name
)
__all__ = [
"install",
"uninstall",
"Creator",
"ls",
"containerise",
# Utility functions
"lsattr",
"lsattrs",
"read",
"maintained_selection",
"unique_name"
]
|
Reorder so it's similar to imports, purely cosmetics/readability
|
Reorder so it's similar to imports, purely cosmetics/readability
|
Python
|
mit
|
getavalon/core,mindbender-studio/core,mindbender-studio/core,getavalon/core
|
---
+++
@@ -29,10 +29,10 @@
"containerise",
# Utility functions
- "maintained_selection",
-
"lsattr",
"lsattrs",
"read",
+
+ "maintained_selection",
"unique_name"
]
|
eda7125f28a9da3c5ccefb3ec5c604ddd23d3034
|
plantcv/plantcv/plot_image.py
|
plantcv/plantcv/plot_image.py
|
# Plot image to screen
import cv2
import numpy
import matplotlib
from plantcv.plantcv import params
from matplotlib import pyplot as plt
from plantcv.plantcv import fatal_error
def plot_image(img, cmap=None):
"""Plot an image to the screen.
:param img: numpy.ndarray
:param cmap: str
:return:
"""
image_type = type(img)
dimensions = numpy.shape(img)
if image_type == numpy.ndarray:
matplotlib.rcParams['figure.dpi'] = params.dpi
# If the image is color then OpenCV stores it as BGR, we plot it as RGB
if len(dimensions) == 3:
plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
plt.show()
elif cmap is None and len(dimensions) == 2:
plt.imshow(img, cmap="gray")
plt.show()
elif cmap is not None and len(dimensions) == 2:
plt.imshow(img, cmap=cmap)
plt.show()
elif image_type == matplotlib.figure.Figure:
fatal_error("Error, matplotlib Figure not supported. Instead try running without plot_image.")
# Plot if the image is a plotnine ggplot image
elif str(image_type) == "<class 'plotnine.ggplot.ggplot'>":
print(img)
|
# Plot image to screen
import cv2
import numpy
import matplotlib
from plantcv.plantcv import params
from matplotlib import pyplot as plt
from plantcv.plantcv import fatal_error
def plot_image(img, cmap=None):
    """Plot an image to the screen.

    :param img: numpy.ndarray (2-D grayscale or 3-D BGR color) or a
        plotnine ggplot object
    :param cmap: str, optional matplotlib colormap for 2-D images
        (defaults to "gray")
    :return: None
    """
    image_type = type(img)
    dimensions = numpy.shape(img)

    if image_type == numpy.ndarray:
        matplotlib.rcParams['figure.dpi'] = params.dpi
        if len(dimensions) == 3:
            # Open a fresh figure so successive plots do not overdraw.
            plt.figure()
            # OpenCV stores color images as BGR; convert to RGB for plotting.
            plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
            plt.show()
        elif len(dimensions) == 2:
            plt.figure()
            # Grayscale images default to the "gray" colormap.
            plt.imshow(img, cmap=cmap if cmap is not None else "gray")
            plt.show()
    elif image_type == matplotlib.figure.Figure:
        fatal_error("Error, matplotlib Figure not supported. Instead try running without plot_image.")
    # Plot if the image is a plotnine ggplot image
    elif str(image_type) == "<class 'plotnine.ggplot.ggplot'>":
        print(img)
|
Create a new figure for each plot
|
Create a new figure for each plot
|
Python
|
mit
|
danforthcenter/plantcv,stiphyMT/plantcv,danforthcenter/plantcv,danforthcenter/plantcv,stiphyMT/plantcv,stiphyMT/plantcv
|
---
+++
@@ -23,14 +23,17 @@
matplotlib.rcParams['figure.dpi'] = params.dpi
# If the image is color then OpenCV stores it as BGR, we plot it as RGB
if len(dimensions) == 3:
+ fig = plt.figure()
plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
plt.show()
elif cmap is None and len(dimensions) == 2:
+ fig = plt.figure()
plt.imshow(img, cmap="gray")
plt.show()
elif cmap is not None and len(dimensions) == 2:
+ fig = plt.figure()
plt.imshow(img, cmap=cmap)
plt.show()
|
607ba7481abc7556c247e140b963dfc9d5bd2161
|
examples/strings.py
|
examples/strings.py
|
import collections
import collections.abc
def strings_have_format_map_method():
"""
As of Python 3.2 you can use the .format_map() method on a string object
to use mapping objects (not just builtin dictionaries) when formatting
a string.
"""
class Default(dict):
def __missing__(self, key):
return key
print("This prints the keys: {a} {key2}".format_map(Default(a="key1")))
mapping = collections.defaultdict(int, a=2)
print("This prints the value 2000: {a}{b}{c}{d}".format_map(mapping))
class MyMapping(collections.abc.Mapping):
def __init__(self):
self._data = {'a': 'A', 'b': 'B', 'c': 'C'}
def __getitem__(self, key):
return self._data[key]
def __len__(self):
return len(self._data)
def __iter__(self):
for item in self._data:
yield item
mapping = MyMapping()
print("This prints ABC: {a}{b}{c}".format_map(mapping))
|
import collections
import collections.abc
def strings_have_format_map_method():
    """
    As of Python 3.2 you can use the .format_map() method on a string object
    to use mapping objects (not just builtin dictionaries) when formatting
    a string.
    """
    class Default(dict):
        # Unknown keys format as the key name itself instead of raising.
        def __missing__(self, key):
            return key

    print("This prints key1 and key2: {key1} and {key2}".format_map(Default(key1="key1")))

    # defaultdict(int) supplies 0 for the missing keys b, c and d.
    mapping = collections.defaultdict(int, a=2)
    print("This prints the value 2000: {a}{b}{c}{d}".format_map(mapping))

    class MyMapping(collections.abc.Mapping):
        # Minimal custom read-only mapping backed by a fixed dict.
        def __init__(self):
            self._data = {'a': 'A', 'b': 'B', 'c': 'C'}

        def __getitem__(self, key):
            return self._data[key]

        def __len__(self):
            return len(self._data)

        def __iter__(self):
            yield from self._data

    print("This prints ABC: {a}{b}{c}".format_map(MyMapping()))
|
Make string example a bit less confusing
|
Make string example a bit less confusing
|
Python
|
mit
|
svisser/python-3-examples
|
---
+++
@@ -14,7 +14,7 @@
def __missing__(self, key):
return key
- print("This prints the keys: {a} {key2}".format_map(Default(a="key1")))
+ print("This prints key1 and key2: {key1} and {key2}".format_map(Default(key1="key1")))
mapping = collections.defaultdict(int, a=2)
print("This prints the value 2000: {a}{b}{c}{d}".format_map(mapping))
|
ca9cd229faaa0fc43a2ac0e4c6354331c0b57550
|
nengo_spinnaker/simulator.py
|
nengo_spinnaker/simulator.py
|
import sys
from pacman103.core import control
from pacman103 import conf
from . import builder
class Simulator(object):
def __init__(self, model, dt=0.001, seed=None):
# Build the model
self.builder = builder.Builder()
self.dao = self.builder(model, dt, seed)
self.dao.writeTextSpecs = True
def run(self, time):
"""Run the model, currently ignores the time."""
self.controller = control.Controller(
sys.modules[__name__],
conf.config.get('Machine', 'machineName')
)
self.controller.dao = self.dao
self.dao.set_hostname(conf.config.get('Machine', 'machineName'))
self.controller.map_model()
self.controller.generate_output()
self.controller.load_targets()
self.controller.load_write_mem()
self.controller.run(self.dao.app_id)
|
import sys
from pacman103.core import control
from pacman103 import conf
from . import builder
class Simulator(object):
    """Builds a Nengo model and runs it on SpiNNaker via PACMAN."""

    def __init__(self, model, dt=0.001, seed=None):
        # Build the model into a DAO the controller can map and load.
        self.builder = builder.Builder()
        self.dao = self.builder(model, dt, seed)
        self.dao.writeTextSpecs = True

    def run(self, time):
        """Run the model, currently ignores the time."""
        machine_name = conf.config.get('Machine', 'machineName')
        self.controller = control.Controller(sys.modules[__name__],
                                             machine_name)

        # Preparation functions
        # Consider moving this into PACMAN103
        for vertex in self.dao.vertices:
            if hasattr(vertex, 'prepare_vertex'):
                vertex.prepare_vertex()

        self.controller.dao = self.dao
        self.dao.set_hostname(machine_name)
        self.controller.map_model()
        self.controller.generate_output()
        self.controller.load_targets()
        self.controller.load_write_mem()
        self.controller.run(self.dao.app_id)
|
Allow vertices to define a `prepare_vertex` function which will be called just once at some point in the build process.
|
Allow vertices to define a `prepare_vertex` function which will be called just once at some point in the build process.
|
Python
|
mit
|
ctn-archive/nengo_spinnaker_2014,ctn-archive/nengo_spinnaker_2014,ctn-archive/nengo_spinnaker_2014,ctn-archive/nengo_spinnaker_2014
|
---
+++
@@ -19,6 +19,13 @@
sys.modules[__name__],
conf.config.get('Machine', 'machineName')
)
+
+ # Preparation functions
+ # Consider moving this into PACMAN103
+ for vertex in self.dao.vertices:
+ if hasattr(vertex, 'prepare_vertex'):
+ vertex.prepare_vertex()
+
self.controller.dao = self.dao
self.dao.set_hostname(conf.config.get('Machine', 'machineName'))
self.controller.map_model()
|
749c727b044244060e1e17ad51505ee11957bfeb
|
tests/test_formatter.py
|
tests/test_formatter.py
|
from datetime import datetime, timedelta
from random import randrange
from todoman import ui
DATE_FORMAT = "%d-%m-%y"
def test_format_date():
"""
Tests the format_date function in todoman.ui.TodoFormatter
"""
formatter = ui.TodoFormatter(DATE_FORMAT)
today = datetime.now()
tomorrow = today + timedelta(days = 1)
any_day = today + timedelta(days = randrange(2, 8))
assert formatter.format_date("") == " "
assert formatter.format_date(today) == " Today"
assert formatter.format_date(tomorrow) == "Tomorrow"
assert formatter.format_date(any_day) == any_day.strftime(DATE_FORMAT)
|
from datetime import datetime, timedelta
from random import randrange
from todoman import ui
DATE_FORMAT = "%d-%m-%y"
def test_format_date():
"""
Tests the format_date function in todoman.ui.TodoFormatter
"""
formatter = ui.TodoFormatter(DATE_FORMAT)
today = datetime.now()
tomorrow = today + timedelta(days=1)
any_day = today + timedelta(days=randrange(2, 8))
assert formatter.format_date("") == " "
assert formatter.format_date(today) == " Today"
assert formatter.format_date(tomorrow) == "Tomorrow"
assert formatter.format_date(any_day) == any_day.strftime(DATE_FORMAT)
|
Remove spaces from around arguements
|
Remove spaces from around arguements
|
Python
|
isc
|
rimshaakhan/todoman
|
---
+++
@@ -12,8 +12,8 @@
"""
formatter = ui.TodoFormatter(DATE_FORMAT)
today = datetime.now()
- tomorrow = today + timedelta(days = 1)
- any_day = today + timedelta(days = randrange(2, 8))
+ tomorrow = today + timedelta(days=1)
+ any_day = today + timedelta(days=randrange(2, 8))
assert formatter.format_date("") == " "
assert formatter.format_date(today) == " Today"
|
7cfdde79d161b463bf720cd7e222812280d09cdc
|
src/fabfile.py
|
src/fabfile.py
|
import os
# pylint: disable=unused-wildcard-import,unused-import,wildcard-import
SRC_DIR = os.path.dirname(os.path.abspath(__file__)) # elife-builder/src/
# once called 'THIS_DIR', now deprecated as confusing.
PROJECT_DIR = os.path.dirname(SRC_DIR) # elife-builder/
from cfn import *
# aws tasks are not working for some reason.. possibly circular dependency
import aws
import metrics
# packer functionality not tested properly since going public
#import packer
import tasks
import master
import askmaster
import buildvars
import project
from deploy import switch_revision_update_instance
from lifecycle import start, stop, stop_if_running_for, update_dns
import masterless
import fixtures
|
import os
# pylint: disable=unused-wildcard-import,unused-import,wildcard-import
SRC_DIR = os.path.dirname(os.path.abspath(__file__)) # elife-builder/src/
# once called 'THIS_DIR', now deprecated as confusing.
PROJECT_DIR = os.path.dirname(SRC_DIR) # elife-builder/
from cfn import *
# aws tasks are not working for some reason.. possibly circular dependency
import aws
import metrics
import tasks
import master
import askmaster
import buildvars
import project
from deploy import switch_revision_update_instance
from lifecycle import start, stop, stop_if_running_for, update_dns
import masterless
import fixtures
|
Remove commented out packer import
|
Remove commented out packer import
|
Python
|
mit
|
elifesciences/builder,elifesciences/builder
|
---
+++
@@ -12,8 +12,6 @@
# aws tasks are not working for some reason.. possibly circular dependency
import aws
import metrics
-# packer functionality not tested properly since going public
-#import packer
import tasks
import master
import askmaster
|
eeba670eb9ee4aabf36843add730eaa983de7ca6
|
run.py
|
run.py
|
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def root():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(finishOnKey="*", action="/handle-key", method="POST") as g:
g.say("Play that trill trap shit bro")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
Refactor code and add ability for many input.
|
Refactor code and add ability for many input.
|
Python
|
mit
|
ColdSauce/tw-1,christophert/tw-1,zachlatta/tw-1
|
---
+++
@@ -4,12 +4,11 @@
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
-def hello_monkey():
+def root():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
-
- with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
- g.say("press 1 or something")
+ with resp.gather(finishOnKey="*", action="/handle-key", method="POST") as g:
+ g.say("Play that trill trap shit bro")
return str(resp)
|
d731b4172592ef905101868b43817f25f5b04063
|
virtstrap/exceptions.py
|
virtstrap/exceptions.py
|
class CommandConfigError(Exception):
"""Exception for command configuration errors"""
pass
|
class CommandConfigError(Exception):
"""Exception for command configuration errors"""
pass
class RequirementsConfigError(Exception):
"""Exception for command configuration errors"""
pass
|
Add a requirements configuration exception
|
Add a requirements configuration exception
|
Python
|
mit
|
ravenac95/virtstrap-core,ravenac95/testvirtstrapdocs,ravenac95/virtstrap-core
|
---
+++
@@ -1,3 +1,7 @@
class CommandConfigError(Exception):
"""Exception for command configuration errors"""
pass
+
+class RequirementsConfigError(Exception):
+ """Exception for command configuration errors"""
+ pass
|
9bdd1c9a33bd48cde186a5d4c425fc8745017cd9
|
lifx.py
|
lifx.py
|
# -*- encoding: utf8 -*-
from __future__ import division, print_function, division
import pylifx
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('bulb_addr')
parser.add_argument('state', choices=('on', 'off'))
args = parser.parse_args()
with pylifx.LifxController(args.bulb_addr) as bulb:
if args.state == 'on':
bulb.on()
else:
bulb.off()
|
# -*- encoding: utf8 -*-
from __future__ import division, print_function, division
import pylifx
def power(bulb, state):
if state == 'on':
bulb.on()
elif state == 'off':
bulb.off()
else:
raise ValueError('Invalid State specified %s' % state)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('bulb_addr')
subparsers = parser.add_subparsers(dest='command')
power_parser = subparsers.add_parser('power')
power_parser.add_argument('state', choices=('on', 'off'))
rgb_parser = subparsers.add_parser('rgb')
rgb_parser.add_argument('red', type=float)
rgb_parser.add_argument('green', type=float)
rgb_parser.add_argument('blue', type=float)
hsb_parser = subparsers.add_parser('hsb')
hsb_parser.add_argument('hue', type=float)
hsb_parser.add_argument('saturation', type=float)
hsb_parser.add_argument('brightness', type=float)
temp_parser = subparsers.add_parser('temperature')
temp_parser.add_argument('kelvin', type=int)
args = parser.parse_args()
with pylifx.LifxController(args.bulb_addr) as bulb:
if args.command == 'power':
power(bulb, args.state)
elif args.command == 'rgb':
bulb.set_rgb(args.red, args.green, args.blue)
elif args.command == 'hsb':
bulb.set_hsb(args.hue, args.saturation, args.brightness)
elif args.command == 'temperature':
bulb.set_temperature(args.kelvin)
|
Add subparsers for power, rgb, hsb and temperature
|
Add subparsers for power, rgb, hsb and temperature
|
Python
|
bsd-3-clause
|
MichaelAquilina/lifx-cmd
|
---
+++
@@ -2,6 +2,15 @@
from __future__ import division, print_function, division
import pylifx
+
+
+def power(bulb, state):
+ if state == 'on':
+ bulb.on()
+ elif state == 'off':
+ bulb.off()
+ else:
+ raise ValueError('Invalid State specified %s' % state)
if __name__ == '__main__':
@@ -9,12 +18,33 @@
parser = argparse.ArgumentParser()
parser.add_argument('bulb_addr')
- parser.add_argument('state', choices=('on', 'off'))
+
+ subparsers = parser.add_subparsers(dest='command')
+
+ power_parser = subparsers.add_parser('power')
+ power_parser.add_argument('state', choices=('on', 'off'))
+
+ rgb_parser = subparsers.add_parser('rgb')
+ rgb_parser.add_argument('red', type=float)
+ rgb_parser.add_argument('green', type=float)
+ rgb_parser.add_argument('blue', type=float)
+
+ hsb_parser = subparsers.add_parser('hsb')
+ hsb_parser.add_argument('hue', type=float)
+ hsb_parser.add_argument('saturation', type=float)
+ hsb_parser.add_argument('brightness', type=float)
+
+ temp_parser = subparsers.add_parser('temperature')
+ temp_parser.add_argument('kelvin', type=int)
args = parser.parse_args()
with pylifx.LifxController(args.bulb_addr) as bulb:
- if args.state == 'on':
- bulb.on()
- else:
- bulb.off()
+ if args.command == 'power':
+ power(bulb, args.state)
+ elif args.command == 'rgb':
+ bulb.set_rgb(args.red, args.green, args.blue)
+ elif args.command == 'hsb':
+ bulb.set_hsb(args.hue, args.saturation, args.brightness)
+ elif args.command == 'temperature':
+ bulb.set_temperature(args.kelvin)
|
dfd6e69b0a6202ed562360201e123e40c7aa1cc9
|
bin/template.py
|
bin/template.py
|
#! /usr/bin/env python
import jinja2
import sys
import re
import os
KEY_REGEX = re.compile('([A-Z]+)=(\w+)')
def main():
args = sys.argv[1:]
if len(args) < 1:
raise Exception('args too short {}'.format(args))
template_file = args[0]
keyword_args = args[1:]
if not os.path.exists(template_file):
raise Exception('File "{}" does not exist'.format(template_file))
context = {}
for arg in keyword_args:
match = KEY_REGEX.match(arg)
if match:
(key, val) = match.groups()
context[key] = val
with open(template_file, 'r') as file_reader:
text = file_reader.read()
print jinja2.Template(text).render(context)
if __name__ == '__main__':
main()
|
#! /usr/bin/env python
import jinja2
import sys
import re
import os
KEY_REGEX = re.compile('([A-Z]+)=(.+)')
def main():
args = sys.argv[1:]
if len(args) < 1:
raise Exception('args too short {}'.format(args))
template_file = args[0]
keyword_args = args[1:]
if not os.path.exists(template_file):
raise Exception('File "{}" does not exist'.format(template_file))
context = {}
for arg in keyword_args:
match = KEY_REGEX.match(arg)
if match:
(key, val) = match.groups()
context[key] = val
with open(template_file, 'r') as file_reader:
text = file_reader.read()
print jinja2.Template(text).render(context)
if __name__ == '__main__':
main()
|
Fix the regex so that it catches all characters to the right of the equals-sign.
|
Fix the regex so that it catches all characters to the right of the equals-sign.
|
Python
|
mit
|
BedquiltDB/bedquilt-core
|
---
+++
@@ -5,7 +5,7 @@
import os
-KEY_REGEX = re.compile('([A-Z]+)=(\w+)')
+KEY_REGEX = re.compile('([A-Z]+)=(.+)')
def main():
|
6377284c022f26cfd9528b09af3ec61fc91a2c54
|
api/tests/__init__.py
|
api/tests/__init__.py
|
import json
from django.test import TestCase, Client
# Create your tests here.
from login.models import myuser
from rest_framework.authtoken.models import Token
class APITestCase(TestCase):
test_schema = 'schema1'
test_table = 'population2'
@classmethod
def setUpClass(cls):
super(APITestCase, cls).setUpClass()
cls.user = myuser.objects.create(name='MrTest')
cls.user.save()
cls.token = Token.objects.get(user=cls.user)
cls.client = Client()
def assertDictEqualKeywise(self, d1, d2, excluded=None):
if not excluded:
excluded = []
self.assertEqual(set(d1.keys()).union(excluded), set(d2.keys()).union(excluded), "Key sets do not match")
for key in d1:
if key not in excluded:
value = d1[key]
covalue = d2[key]
self.assertEqual(value, covalue,
"Key '{key}' does not match.".format(key=key))
|
import json
from django.test import TestCase, Client
# Create your tests here.
from login.models import myuser
from rest_framework.authtoken.models import Token
class APITestCase(TestCase):
test_schema = 'schema1'
test_table = 'population2'
@classmethod
def setUpClass(cls):
super(APITestCase, cls).setUpClass()
cls.user = myuser.objects.create(name='MrTest', mail_address='mrtest@test.com')
cls.user.save()
cls.token = Token.objects.get(user=cls.user)
cls.other_user = myuser.objects.create(name='NotMrTest', mail_address='notmrtest@test.com')
cls.other_user.save()
cls.other_token = Token.objects.get(user=cls.other_user)
cls.client = Client()
def assertDictEqualKeywise(self, d1, d2, excluded=None):
if not excluded:
excluded = []
self.assertEqual(set(d1.keys()).union(excluded), set(d2.keys()).union(excluded), "Key sets do not match")
for key in d1:
if key not in excluded:
value = d1[key]
covalue = d2[key]
self.assertEqual(value, covalue,
"Key '{key}' does not match.".format(key=key))
|
Add another user for permission testing
|
Add another user for permission testing
|
Python
|
agpl-3.0
|
tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
|
---
+++
@@ -16,9 +16,14 @@
@classmethod
def setUpClass(cls):
super(APITestCase, cls).setUpClass()
- cls.user = myuser.objects.create(name='MrTest')
+ cls.user = myuser.objects.create(name='MrTest', mail_address='mrtest@test.com')
cls.user.save()
cls.token = Token.objects.get(user=cls.user)
+
+ cls.other_user = myuser.objects.create(name='NotMrTest', mail_address='notmrtest@test.com')
+ cls.other_user.save()
+ cls.other_token = Token.objects.get(user=cls.other_user)
+
cls.client = Client()
|
833a83a109bc52b034bbfeabc9a9e2d99d8226f9
|
app.tmpl/__init__.py
|
app.tmpl/__init__.py
|
# Main application file
#
# Copyright (c) 2017, Alexandre Hamelin <alexandre.hamelin gmail.com>
import os
from flask import Flask
from flask_login import LoginManager
app = Flask(__name__)
app.secret_key = 'default-secret-key'
app.config.from_object('plbackend.config.DefaultConfig')
if 'PLBACKEND_CONFIG' in os.environ:
app.config.from_envvar('PLBACKEND_CONFIG')
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
from {{PROJECTNAME}}.models import *
|
# Main application file
#
# Copyright (c) 2017, Alexandre Hamelin <alexandre.hamelin gmail.com>
import os
from flask import Flask
from flask_login import LoginManager
app = Flask(__name__)
app.secret_key = 'default-secret-key'
app.config.from_object(app.name + '.config.DefaultConfig')
if 'APP_CONFIG' in os.environ:
app.config.from_envvar('APP_CONFIG')
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
# Import anything that depended on `app`
from {{PROJECTNAME}}.views import *
from {{PROJECTNAME}}.models import *
|
Fix the app config loading process
|
Fix the app config loading process
|
Python
|
mit
|
0xquad/flask-app-template,0xquad/flask-app-template,0xquad/flask-app-template
|
---
+++
@@ -9,9 +9,9 @@
app = Flask(__name__)
app.secret_key = 'default-secret-key'
-app.config.from_object('plbackend.config.DefaultConfig')
-if 'PLBACKEND_CONFIG' in os.environ:
- app.config.from_envvar('PLBACKEND_CONFIG')
+app.config.from_object(app.name + '.config.DefaultConfig')
+if 'APP_CONFIG' in os.environ:
+ app.config.from_envvar('APP_CONFIG')
login_manager = LoginManager()
login_manager.init_app(app)
|
90405c60b5d2ce583597382bc72e116cb9a450bd
|
project/library/models.py
|
project/library/models.py
|
from django.db import models
class Author(models.Model):
'''Object for book author'''
first_name = models.CharField(max_length=128)
last_name = models.CharField(max_length=128)
def __unicode__(self):
return self.last_name + ", " + self.first_name
class Book(models.Model):
'''Object for library books'''
title = models.CharField(max_length=128)
isbn = models.CharField(max_length=13)
isbn13 = models.CharField(max_length=13)
description = models.TextField()
authors = models.ManyToManyField(Author)
year_published = models.SmallIntegerField(null=True)
status = models.TextField(default="In")
def __unicode__(self):
return self.title
class Reservation(models.Model):
'''Object for book reservations'''
book_id = models.ForeignKey('Book')
member_name = models.CharField(max_length=128)
email = models.EmailField()
date_created = models.DateTimeField()
def __unicode__(self):
return self.member_name + ":" + self.book_id
|
from datetime import datetime
from django.db import models
class Author(models.Model):
'''Object for book author'''
first_name = models.CharField(max_length=128)
last_name = models.CharField(max_length=128)
def __unicode__(self):
return self.last_name + ", " + self.first_name
class Book(models.Model):
'''Object for library books'''
title = models.CharField(max_length=128)
isbn = models.CharField(max_length=13)
isbn13 = models.CharField(max_length=13)
description = models.TextField()
authors = models.ManyToManyField(Author)
year_published = models.SmallIntegerField(null=True)
status = models.TextField(default="In")
def __unicode__(self):
return self.title
class Reservation(models.Model):
'''Object for book reservations'''
book_id = models.ForeignKey('Book')
member_name = models.CharField(max_length=128)
email = models.EmailField()
date_created = models.DateTimeField(default=datetime.now())
def __unicode__(self):
return self.member_name + ":" + self.book_id
|
Update reservation object to use current time
|
Update reservation object to use current time
|
Python
|
mit
|
DUCSS/ducss-site-old,DUCSS/ducss-site-old,DUCSS/ducss-site-old
|
---
+++
@@ -1,3 +1,4 @@
+from datetime import datetime
from django.db import models
class Author(models.Model):
@@ -31,7 +32,7 @@
book_id = models.ForeignKey('Book')
member_name = models.CharField(max_length=128)
email = models.EmailField()
- date_created = models.DateTimeField()
+ date_created = models.DateTimeField(default=datetime.now())
def __unicode__(self):
return self.member_name + ":" + self.book_id
|
58ed8c24288ee8f470acfa85cc6ae267f0ad2fd8
|
pbag/tests/test_serialize.py
|
pbag/tests/test_serialize.py
|
from pbag.serialize import dump, load
data = [b'Hello\n', 1, b'world!', None]
def test_core():
with open('_foo.pack', 'wb') as f:
dump(data, f)
with open('_foo.pack', 'rb') as f:
data2 = load(f)
assert data == data2
def test_multiple_dumps():
with open('_foo.pack', 'wb') as f:
dump(1, f)
dump(data, f)
dump(2, f)
with open('_foo.pack', 'rb') as f:
a = load(f)
b = load(f)
c = load(f)
assert a == 1
assert b == data
assert c == 2
|
from tempfile import TemporaryFile
from pbag.serialize import dump, load
data = [b'Hello\n', 1, b'world!', None]
def test_core():
with TemporaryFile(mode='wb+') as f:
dump(data, f)
f.seek(0)
data2 = load(f)
assert data == data2
def test_multiple_dumps():
with TemporaryFile(mode='wb+') as f:
dump(1, f)
dump(data, f)
dump(2, f)
f.seek(0)
a = load(f)
b = load(f)
c = load(f)
assert a == 1
assert b == data
assert c == 2
|
Use temporary files for testing.
|
Use temporary files for testing.
|
Python
|
bsd-3-clause
|
jakirkham/dask,clarkfitzg/dask,marianotepper/dask,ContinuumIO/dask,esc/dask,pombredanne/dask,wiso/dask,jcrist/dask,mraspaud/dask,hainm/dask,pombredanne/dask,jakirkham/dask,ssanderson/dask,mraspaud/dask,cpcloud/dask,wiso/dask,mrocklin/dask,minrk/dask,ContinuumIO/dask,PhE/dask,PhE/dask,esc/dask,jayhetee/dask,blaze/dask,dask/dask,mrocklin/dask,cowlicks/dask,mikegraham/dask,chrisbarber/dask,hainm/dask,freeman-lab/dask,gameduell/dask,dask/dask,marianotepper/dask,jcrist/dask,clarkfitzg/dask,minrk/dask,simudream/dask,vikhyat/dask,simudream/dask,freeman-lab/dask,jayhetee/dask,vikhyat/dask,ssanderson/dask,blaze/dask
|
---
+++
@@ -1,3 +1,4 @@
+from tempfile import TemporaryFile
from pbag.serialize import dump, load
@@ -5,22 +6,22 @@
def test_core():
- with open('_foo.pack', 'wb') as f:
+ with TemporaryFile(mode='wb+') as f:
dump(data, f)
-
- with open('_foo.pack', 'rb') as f:
+ f.seek(0)
data2 = load(f)
assert data == data2
def test_multiple_dumps():
- with open('_foo.pack', 'wb') as f:
+ with TemporaryFile(mode='wb+') as f:
dump(1, f)
dump(data, f)
dump(2, f)
- with open('_foo.pack', 'rb') as f:
+ f.seek(0)
+
a = load(f)
b = load(f)
c = load(f)
|
2b9f5550c85c3f89de7ee25f29cf40af121c70d3
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='roku',
version='3.1',
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='https://github.com/jcarbaugh/python-roku',
packages=find_packages(),
install_requires=[
'lxml>=3.6,<3.7',
'requests>=2.10,<2.11',
'six'
],
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
from setuptools import setup, find_packages
import os
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
setup(
name='roku',
version='3.0',
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
author_email='jcarbaugh@gmail.com',
url='https://github.com/jcarbaugh/python-roku',
packages=find_packages(),
install_requires=[
'lxml>=3.6,<3.7',
'requests>=2.10,<2.11',
'six'
],
license='BSD License',
platforms=["any"],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
Modify version back to 3.0
|
Modify version back to 3.0
|
Python
|
bsd-3-clause
|
jcarbaugh/python-roku
|
---
+++
@@ -7,7 +7,7 @@
setup(
name='roku',
- version='3.1',
+ version='3.0',
description='Client for the Roku media player',
long_description=readme,
author='Jeremy Carbaugh',
|
08bab0a9e9ae2c362f1c4238fa70b39be109edfd
|
setup.py
|
setup.py
|
import os
from setuptools import setup
import mando
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as fobj:
readme = fobj.read()
setup(name='mando',
version=mando.__version__,
author='Michele Lacchia',
author_email='michelelacchia@gmail.com',
url='https://mando.readthedocs.org/',
download_url='https://pypi.python.org/mando/',
license='MIT',
description='Create Python CLI apps with little to no effort at all!',
platforms='any',
long_description=readme,
packages=['mando', 'mando.tests'],
test_suite='mando.tests',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
]
)
|
import os
from setuptools import setup
import mando
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as fobj:
readme = fobj.read()
setup(name='mando',
version=mando.__version__,
author='Michele Lacchia',
author_email='michelelacchia@gmail.com',
url='https://mando.readthedocs.org/',
download_url='https://pypi.python.org/mando/',
license='MIT',
description='Create Python CLI apps with little to no effort at all!',
platforms='any',
long_description=readme,
packages=['mando', 'mando.tests'],
test_suite='mando.tests',
install_requires=['argparse'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
]
)
|
Add argparse as a dependency
|
Add argparse as a dependency
|
Python
|
mit
|
MarioSchwalbe/mando,MarioSchwalbe/mando,rubik/mando
|
---
+++
@@ -18,6 +18,7 @@
long_description=readme,
packages=['mando', 'mando.tests'],
test_suite='mando.tests',
+ install_requires=['argparse'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
|
e69c2796fae7b91b2b3658bca5f946143277f54f
|
python_apps/pypo/setup.py
|
python_apps/pypo/setup.py
|
import os
from setuptools import setup
# Change directory since setuptools uses relative paths
os.chdir(os.path.dirname(os.path.realpath(__file__)))
setup(
name="airtime-playout",
version="1.0",
description="LibreTime Playout",
author="LibreTime Contributors",
url="https://github.com/libretime/libretime",
project_urls={
"Bug Tracker": "https://github.com/libretime/libretime/issues",
"Documentation": "https://libretime.org",
"Source Code": "https://github.com/libretime/libretime",
},
license="AGPLv3",
packages=[
"pypo",
"liquidsoap",
],
package_data={"": ["**/*.liq", "*.cfg", "*.types"]},
scripts=[
"bin/airtime-playout",
"bin/airtime-liquidsoap",
"bin/pyponotify",
],
install_requires=[
"amqplib",
"anyjson",
"argparse",
"configobj",
"docopt",
"future",
"kombu",
"mutagen",
"PyDispatcher",
"pyinotify",
"pytz",
"requests",
"defusedxml",
"packaging",
],
zip_safe=False,
)
|
import os
from setuptools import setup
# Change directory since setuptools uses relative paths
os.chdir(os.path.dirname(os.path.realpath(__file__)))
setup(
name="airtime-playout",
version="1.0",
description="LibreTime Playout",
author="LibreTime Contributors",
url="https://github.com/libretime/libretime",
project_urls={
"Bug Tracker": "https://github.com/libretime/libretime/issues",
"Documentation": "https://libretime.org",
"Source Code": "https://github.com/libretime/libretime",
},
license="AGPLv3",
packages=[
"pypo",
"liquidsoap",
],
package_data={"": ["**/*.liq", "*.cfg", "*.types"]},
scripts=[
"bin/airtime-playout",
"bin/airtime-liquidsoap",
"bin/pyponotify",
],
install_requires=[
"amqplib",
"configobj",
"defusedxml",
"kombu",
"mutagen",
"packaging",
"pytz",
"requests",
],
zip_safe=False,
)
|
Remove unused python packages from playout
|
Remove unused python packages from playout
- anyjson is failing with setuptools >=58, because it
dropped the use_2to3 support.
- other package are simply not used.
|
Python
|
agpl-3.0
|
LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime
|
---
+++
@@ -29,19 +29,13 @@
],
install_requires=[
"amqplib",
- "anyjson",
- "argparse",
"configobj",
- "docopt",
- "future",
+ "defusedxml",
"kombu",
"mutagen",
- "PyDispatcher",
- "pyinotify",
+ "packaging",
"pytz",
"requests",
- "defusedxml",
- "packaging",
],
zip_safe=False,
)
|
fa232a1bc9d051ac5d734d7dd888601608f6854b
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='nose2_html_report',
packages=['nose2_html_report'],
version='0.1.0',
description='Generate an HTML report for your unit tests',
long_description=open('README.md', 'r').read(),
author='Michael Grijalva',
author_email='michaelgrij@gmail.com',
license='MIT',
install_requires=['jinja2'],
url='https://github.com/mgrijalva/nose2-html-report',
download_url='',
keywords=['nose2', 'testing', 'reporting'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
]
)
|
from setuptools import setup
setup(
name='nose2_html_report',
packages=['nose2_html_report'],
version='0.1.0',
description='Generate an HTML report for your unit tests',
long_description=open('README.md', 'r').read(),
author='Michael Grijalva',
author_email='michaelgrij@gmail.com',
license='MIT',
install_requires=['jinja2'],
package_data={
'nose2_html_report': ['templates/report.html']
},
url='https://github.com/mgrijalva/nose2-html-report',
download_url='',
keywords=['nose2', 'testing', 'reporting'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
]
)
|
Include template file in distribution
|
Include template file in distribution
|
Python
|
mit
|
mgrijalva/nose2-html-report,mgrijalva/nose2-html-report
|
---
+++
@@ -10,6 +10,9 @@
author_email='michaelgrij@gmail.com',
license='MIT',
install_requires=['jinja2'],
+ package_data={
+ 'nose2_html_report': ['templates/report.html']
+ },
url='https://github.com/mgrijalva/nose2-html-report',
download_url='',
keywords=['nose2', 'testing', 'reporting'],
|
f585b90825f5bb57cadefc6689765fd1ebff6b1a
|
setup.py
|
setup.py
|
import os
import re
import sys
from sys import version_info
from os.path import dirname, join as pjoin
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
"""
Publish to PyPi.
"""
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.2'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
import os
import re
import sys
from sys import version_info
from os.path import dirname, join as pjoin
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
"""
Publish to PyPi.
"""
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.3'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
Update orderedmultidict dependency to 0.7.3 and newer.
|
Update orderedmultidict dependency to 0.7.3 and newer.
|
Python
|
unlicense
|
Gerhut/furl,guiquanz/furl,lastfm/furl,penyatree/furl
|
---
+++
@@ -38,7 +38,7 @@
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
- install_requires=['orderedmultidict >= 0.7.2'],
+ install_requires=['orderedmultidict >= 0.7.3'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
b21706f393ed7c619f5b4ce8ce8a481fb3941a92
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
def recursive_path(pack, path):
matches = []
for root, dirnames, filenames in os.walk(os.path.join(pack, path)):
for filename in filenames:
matches.append(os.path.join(root, filename)[len(pack) + 1:])
return matches
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
long_description = "Add unittest cell magics to IPython for easily running tests"
setup(
name="ipython_unittest",
version="0.2.4",
description="Add unittest cell magics to IPython for easily running tests",
long_description=long_description,
packages=find_packages(exclude=["tests_*", "tests"]),
package_data={
"ipython_unittest": recursive_path("ipython_unittest", "resources")},
author=("Joao Pimentel",),
author_email="joaofelipenp@gmail.com",
license="MIT",
keywords="ipython jupyter unittest tdd dojo",
url="https://github.com/JoaoFelipe/ipython-unittest"
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
def recursive_path(pack, path):
matches = []
for root, dirnames, filenames in os.walk(os.path.join(pack, path)):
for filename in filenames:
matches.append(os.path.join(root, filename)[len(pack) + 1:])
return matches
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
long_description = "Add unittest cell magics to IPython for easily running tests"
setup(
name="ipython_unittest",
version="0.2.6",
description="Add unittest cell magics to IPython for easily running tests",
long_description=long_description,
packages=find_packages(exclude=["tests_*", "tests"]),
package_data={
"ipython_unittest": recursive_path("ipython_unittest", "resources")},
author=("Joao Pimentel",),
author_email="joaofelipenp@gmail.com",
license="MIT",
keywords="ipython jupyter unittest tdd dojo",
url="https://github.com/JoaoFelipe/ipython-unittest"
)
|
Update package to show readme
|
Update package to show readme
|
Python
|
mit
|
JoaoFelipe/ipython-unittest,JoaoFelipe/ipython-unittest
|
---
+++
@@ -19,7 +19,7 @@
setup(
name="ipython_unittest",
- version="0.2.4",
+ version="0.2.6",
description="Add unittest cell magics to IPython for easily running tests",
long_description=long_description,
packages=find_packages(exclude=["tests_*", "tests"]),
|
f1a683e98bf72cbbe4b65414184ed371af3b5879
|
setup.py
|
setup.py
|
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy'],
install_requires=['numpy', 'matplotlib'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()
|
from setuptools import setup
from tools.generate_pyi import generate_pyi
def main():
# Generate .pyi files
import pyxtf.xtf_ctypes
generate_pyi(pyxtf.xtf_ctypes)
import pyxtf.vendors.kongsberg
generate_pyi(pyxtf.vendors.kongsberg)
# Run setup script
setup(name='pyxtf',
version='0.1',
description='eXtended Triton Format (XTF) file interface',
author='Oystein Sture',
author_email='oysstu@gmail.com',
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy'],
install_requires=['numpy'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
classifiers=[
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience',
'Intended Audience :: Science/Research',
'Natural Language :: English',
'Topic :: Scientific/Engineering',
'Programming Language:: Python:: 3:: Only'
])
if __name__ == '__main__':
main()
|
Remove matplotlib from required dependencies
|
Remove matplotlib from required dependencies
|
Python
|
mit
|
oysstu/pyxtf
|
---
+++
@@ -17,7 +17,7 @@
url='https://github.com/oysstu/pyxtf',
license='MIT',
setup_requires=['numpy'],
- install_requires=['numpy', 'matplotlib'],
+ install_requires=['numpy'],
packages=['pyxtf', 'pyxtf.vendors'],
package_data={'':['*.pyi']},
use_2to3=False,
|
d9f3e43a05663706b266d60c1f707133b0c3b6a0
|
error_proxy.py
|
error_proxy.py
|
#!/usr/bin/env python
import sys
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(420)
if sys.argv[1:]:
port = sys.argv[1:]
else:
port = 8000
httpd = HTTPServer(("localhost", port), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(420)
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
Configure port via a Proxyfile
|
Configure port via a Proxyfile
|
Python
|
mit
|
pozorvlak/error_proxy
|
---
+++
@@ -1,6 +1,7 @@
#!/usr/bin/env python
import sys
+import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
@@ -8,8 +9,10 @@
self.send_response(420)
if sys.argv[1:]:
- port = sys.argv[1:]
+ config_file = sys.argv[1:]
else:
- port = 8000
-httpd = HTTPServer(("localhost", port), ErrorHTTPRequestHandler)
+ config_file = "Proxyfile"
+with open(config_file) as c:
+ config = json.load(c)
+httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
a6ce774d11100208d2a65aa71c3cb147a550a906
|
dpath/__init__.py
|
dpath/__init__.py
|
import sys
# Python version flags for Python 3 support
PY2 = ( sys.version_info.major == 2 )
PY3 = ( sys.version_info.major == 3 )
|
import sys
# Python version flags for Python 3 support
python_major_version = 0
if hasattr(sys.version_info, 'major'):
python_major_version = sys.version_info.major
else:
python_major_version = sys.version_info[0]
PY2 = ( python_major_version == 2 )
PY3 = ( python_major_version == 3 )
|
Make this work on python2.6 again
|
Make this work on python2.6 again
|
Python
|
mit
|
akesterson/dpath-python,pombredanne/dpath-python,benthomasson/dpath-python,lexhung/dpath-python,calebcase/dpath-python
|
---
+++
@@ -1,5 +1,11 @@
import sys
# Python version flags for Python 3 support
-PY2 = ( sys.version_info.major == 2 )
-PY3 = ( sys.version_info.major == 3 )
+python_major_version = 0
+if hasattr(sys.version_info, 'major'):
+ python_major_version = sys.version_info.major
+else:
+ python_major_version = sys.version_info[0]
+
+PY2 = ( python_major_version == 2 )
+PY3 = ( python_major_version == 3 )
|
66250b7e448f54bc0cd9eb8b16731a885e4cf3e7
|
setup.py
|
setup.py
|
from setuptools import setup
try:
from pypandoc import convert
read_md = lambda f: convert(f, 'rst')
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
read_md = lambda f: open(f, 'r').read()
setup(
name="elyzer",
entry_points={
'console_scripts': [
'elyzer=elyzer.__main__:main'
]
},
packages=['elyzer'],
version="1.0.2",
description="Step-by-Step Debug Elasticsearch Analyzers",
long_description=read_md('README.md'),
license="Apache",
author="Doug Turnbull",
author_email="dturnbull@o19s.com",
url='https://github.com/o19s/elyzer',
install_requires=['elasticsearch>=5.0.0,<6.0.0'],
keywords=["elasticsearch", "database"],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Utilities'
]
)
|
from setuptools import setup
try:
from pypandoc import convert
read_md = lambda f: convert(f, 'rst')
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
read_md = lambda f: open(f, 'r').read()
setup(
name="elyzer",
entry_points={
'console_scripts': [
'elyzer=elyzer.__main__:main'
]
},
packages=['elyzer'],
version="1.0.2",
description="Step-by-Step Debug Elasticsearch Analyzers",
long_description=read_md('README.md'),
license="Apache",
author="Doug Turnbull",
author_email="dturnbull@o19s.com",
url='https://github.com/o19s/elyzer',
install_requires=['elasticsearch>=5.0.0,<6.0.0'],
keywords=["elasticsearch", "database"],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Topic :: Utilities'
]
)
|
Put on Python 3 only
|
Put on Python 3 only
|
Python
|
apache-2.0
|
o19s/elyzer
|
---
+++
@@ -24,15 +24,12 @@
install_requires=['elasticsearch>=5.0.0,<6.0.0'],
keywords=["elasticsearch", "database"],
classifiers=[
- 'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.6',
- 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
'Topic :: Utilities'
]
)
|
28e9cd3e9d047883668263e595978392cd208ac5
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
def readfile(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name="imagesort",
version="0.2.0",
packages=find_packages(),
install_requires=[
'ExifRead>=1.4.0',
],
include_package_data=True,
author="Børge Lanes",
author_email="borge.lanes@gmail.com",
description=('Organize image files by date taken'),
long_description=readfile("README.rst"),
license="MIT",
keywords="media",
url="https://github.com/leinz/imagesort",
entry_points={
'console_scripts': [
'imagesort = imagesort.imagesort:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
def readfile(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name="imagesort",
version="0.2.0",
packages=find_packages(),
install_requires=[
'ExifRead>=1.4.0',
],
include_package_data=True,
author="Børge Lanes",
author_email="borge.lanes@gmail.com",
description=('Organize image files by date taken'),
long_description=readfile("README.rst"),
license="MIT",
keywords="media",
url="https://github.com/leinz/imagesort",
entry_points={
'console_scripts': [
'imagesort = imagesort.imagesort:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Multimedia',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
)
|
Mark package as OS independent
|
Mark package as OS independent
|
Python
|
mit
|
leinz/imagesort
|
---
+++
@@ -34,9 +34,7 @@
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
- 'Operating System :: POSIX',
- 'Operating System :: Microsoft :: Windows',
- 'Operating System :: MacOS :: MacOS X',
+ 'Operating System :: OS Independent',
'Topic :: Multimedia',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
|
3b741d4707cc81abcb22945b8532a416bbf00791
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from setuptools import setup, find_packages
# Uploading to PyPI
# =================
# The first time only:
# $ python setup.py register -r pypi
#
# Every version bump:
# $ git tag <version>; git push --tags
# $ python setup.py sdist upload -r pypi
version = '0.4.1'
setup(
name='klab',
version=version,
author='Kortemme Lab, UCSF',
author_email='support@kortemmelab.ucsf.edu',
url='https://github.com/Kortemme-Lab/klab',
download_url='https://github.com/Kortemme-Lab/klab/tarball/'+version,
license='MIT',
description="A collection of utilities used by our lab for computational biophysics",
long_description=open('README.rst').read(),
keywords=['utilities', 'library', 'biophysics'],
classifiers=[
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 3 - Alpha",
'Programming Language :: Python :: 2',
],
packages=find_packages(),
package_data={
'klab.bio.fragments': [
'make_fragments_QB3_cluster.pl',
],
},
install_requires=[],
entry_points={
'console_scripts': [
'klab_generate_fragments=klab.bio.fragments.generate_fragments:main',
],
},
)
|
#!/usr/bin/env python2
from setuptools import setup, find_packages
# Uploading to PyPI
# =================
# The first time only:
# $ python setup.py register -r pypi
#
# Every version bump:
# $ git tag <version>; git push --tags
# $ python setup.py sdist upload -r pypi
version = '0.4.1'
setup(
name='klab',
version=version,
author='Kortemme Lab, UCSF',
author_email='support@kortemmelab.ucsf.edu',
url='https://github.com/Kortemme-Lab/klab',
download_url='https://github.com/Kortemme-Lab/klab/tarball/'+version,
license='MIT',
description="A collection of utilities used by our lab for computational biophysics",
long_description=open('README.rst').read(),
keywords=['utilities', 'library', 'biophysics'],
classifiers=[
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Development Status :: 3 - Alpha",
'Programming Language :: Python :: 2',
],
packages=find_packages(),
package_data={
'klab.bio.fragments': [
'make_fragments_QB3_cluster.pl',
],
},
install_requires=[],
entry_points={
'console_scripts': [
'klab_generate_fragments=klab.bio.fragments.generate_fragments:main',
'klab_per_residue_scores=klab.rosetta.per_residue_scores:main',
],
},
)
|
Add a script to show per-residue score from a PDB file.
|
Add a script to show per-residue score from a PDB file.
|
Python
|
mit
|
Kortemme-Lab/klab,Kortemme-Lab/klab,Kortemme-Lab/klab,Kortemme-Lab/klab
|
---
+++
@@ -39,6 +39,7 @@
entry_points={
'console_scripts': [
'klab_generate_fragments=klab.bio.fragments.generate_fragments:main',
+ 'klab_per_residue_scores=klab.rosetta.per_residue_scores:main',
],
},
)
|
ae84f8224d7ab01e419ff548cd8be28eb4b15804
|
examples/image_test.py
|
examples/image_test.py
|
import sys
import os
import pyglet.window
from pyglet.gl import *
from pyglet import clock
from pyglet.ext.scene2d import Image2d
from ctypes import *
if len(sys.argv) != 2:
print 'Usage: %s <PNG/JPEG filename>'%sys.argv[0]
sys.exit()
window = pyglet.window.Window(width=400, height=400)
image = Image2d.load(sys.argv[1])
s = max(image.width, image.height)
c = clock.Clock(60)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(60., 1., 1., 100.)
glEnable(GL_COLOR_MATERIAL)
glMatrixMode(GL_MODELVIEW)
glClearColor(0, 0, 0, 0)
glColor4f(1, 1, 1, 1)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_BLEND)
while not window.has_exit:
c.tick()
window.dispatch_events()
glClear(GL_COLOR_BUFFER_BIT)
glLoadIdentity()
glScalef(1./s, 1./s, 1.)
glTranslatef(-image.width/2, -image.height/2, -1.)
image.draw()
window.flip()
|
import sys
import os
import ctypes
import pyglet.window
from pyglet.gl import *
from pyglet import clock
from pyglet import image
if len(sys.argv) != 2:
print 'Usage: %s <PNG/JPEG filename>'%sys.argv[0]
sys.exit()
window = pyglet.window.Window(width=400, height=400)
image = image.load(sys.argv[1])
imx = imy = 0
@window.event
def on_mouse_drag(x, y, dx, dy, buttons, modifiers):
global imx, imy
imx += dx
imy += dy
clock.set_fps_limit(30)
while not window.has_exit:
clock.tick()
window.dispatch_events()
glClear(GL_COLOR_BUFFER_BIT)
image.blit(imx, imy, 0)
window.flip()
|
Use the core, make example more useful.
|
Use the core, make example more useful.
|
Python
|
bsd-3-clause
|
gdkar/pyglet,kmonsoor/pyglet,xshotD/pyglet,google-code-export/pyglet,xshotD/pyglet,kmonsoor/pyglet,shaileshgoogler/pyglet,mpasternak/pyglet-fix-issue-518-522,kmonsoor/pyglet,google-code-export/pyglet,Austin503/pyglet,arifgursel/pyglet,Austin503/pyglet,arifgursel/pyglet,cledio66/pyglet,shaileshgoogler/pyglet,odyaka341/pyglet,cledio66/pyglet,kmonsoor/pyglet,gdkar/pyglet,Alwnikrotikz/pyglet,shaileshgoogler/pyglet,mpasternak/michaldtz-fix-552,Alwnikrotikz/pyglet,arifgursel/pyglet,xshotD/pyglet,mpasternak/pyglet-fix-issue-552,gdkar/pyglet,mpasternak/michaldtz-fixes-518-522,mpasternak/pyglet-fix-issue-518-522,qbektrix/pyglet,mpasternak/pyglet-fix-issue-552,shaileshgoogler/pyglet,qbektrix/pyglet,Alwnikrotikz/pyglet,mpasternak/michaldtz-fixes-518-522,Austin503/pyglet,odyaka341/pyglet,gdkar/pyglet,shaileshgoogler/pyglet,mpasternak/michaldtz-fix-552,odyaka341/pyglet,Alwnikrotikz/pyglet,mpasternak/pyglet-fix-issue-552,mpasternak/pyglet-fix-issue-518-522,cledio66/pyglet,xshotD/pyglet,mpasternak/pyglet-fix-issue-552,Austin503/pyglet,google-code-export/pyglet,mpasternak/michaldtz-fix-552,google-code-export/pyglet,qbektrix/pyglet,odyaka341/pyglet,qbektrix/pyglet,xshotD/pyglet,qbektrix/pyglet,Austin503/pyglet,Alwnikrotikz/pyglet,google-code-export/pyglet,kmonsoor/pyglet,mpasternak/pyglet-fix-issue-518-522,mpasternak/michaldtz-fix-552,cledio66/pyglet,arifgursel/pyglet,odyaka341/pyglet,mpasternak/michaldtz-fixes-518-522,arifgursel/pyglet,cledio66/pyglet,mpasternak/michaldtz-fixes-518-522,gdkar/pyglet
|
---
+++
@@ -1,46 +1,30 @@
import sys
import os
+import ctypes
import pyglet.window
from pyglet.gl import *
from pyglet import clock
-from pyglet.ext.scene2d import Image2d
-
-from ctypes import *
+from pyglet import image
if len(sys.argv) != 2:
print 'Usage: %s <PNG/JPEG filename>'%sys.argv[0]
sys.exit()
window = pyglet.window.Window(width=400, height=400)
+image = image.load(sys.argv[1])
+imx = imy = 0
+@window.event
+def on_mouse_drag(x, y, dx, dy, buttons, modifiers):
+ global imx, imy
+ imx += dx
+ imy += dy
-image = Image2d.load(sys.argv[1])
-s = max(image.width, image.height)
-
-c = clock.Clock(60)
-
-glMatrixMode(GL_PROJECTION)
-glLoadIdentity()
-gluPerspective(60., 1., 1., 100.)
-glEnable(GL_COLOR_MATERIAL)
-
-glMatrixMode(GL_MODELVIEW)
-glClearColor(0, 0, 0, 0)
-glColor4f(1, 1, 1, 1)
-
-glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
-glEnable(GL_BLEND)
-
+clock.set_fps_limit(30)
while not window.has_exit:
- c.tick()
+ clock.tick()
window.dispatch_events()
-
glClear(GL_COLOR_BUFFER_BIT)
- glLoadIdentity()
-
- glScalef(1./s, 1./s, 1.)
- glTranslatef(-image.width/2, -image.height/2, -1.)
- image.draw()
-
+ image.blit(imx, imy, 0)
window.flip()
|
820ea244604a6563e2532d2cb848c9509bab4e77
|
ai_player.py
|
ai_player.py
|
import ab_bridge
import alpha_beta
from gui import *
from player import *
import threading
class AIPlayer(Player):
""" Yes there is a circular dependancy between AIPlayer and Game """
def __init__(self, max_depth, *args, **vargs):
Player.__init__(self, *args, **vargs)
self.max_depth = max_depth
'''
#TODO use super?
def __init__(self, max_depth, *args, **kwargs):
super(AIPlayer, self).__init__(*args, **kwargs)
'''
def attach_to_game(self, base_game):
self.ab_game = ab_bridge.ABGame(base_game)
def prompt_for_action(self, base_game, gui, test=False):
if test:
self.search_thread(gui, True)
else:
t = threading.Thread(target=self.search_thread, args=(gui,))
# Allow the program to be exited quickly
t.daemon = True
t.start()
return "%s is thinking" % self.get_name()
def get_type(self):
return "computer"
def search_thread(self, gui, test=False):
ab_game = self.ab_game
move, value = alpha_beta.alphabeta_search(ab_game.current_state, ab_game,
max_depth=self.max_depth)
action = move[0]
if test:
self.action = action
else:
gui.enqueue_action(action)
gui.trig()
# TODO This is only for testing!
def get_action(self, game, gui):
return self.action
|
import ab_bridge
import alpha_beta
from gui import *
from player import *
import threading
class AIPlayer(Player):
""" Yes there is a circular dependancy between AIPlayer and Game """
def __init__(self, *args, **vargs):
Player.__init__(self, *args, **vargs)
self.max_depth = 1
def set_max_depth(self, max_depth):
self.max_depth = max_depth
def attach_to_game(self, base_game):
self.ab_game = ab_bridge.ABGame(base_game)
def prompt_for_action(self, base_game, gui, test=False):
if test:
self.search_thread(gui, True)
else:
t = threading.Thread(target=self.search_thread, args=(gui,))
# Allow the program to be exited quickly
t.daemon = True
t.start()
return "%s is thinking" % self.get_name()
def get_type(self):
return "computer"
def search_thread(self, gui, test=False):
ab_game = self.ab_game
move, value = alpha_beta.alphabeta_search(ab_game.current_state, ab_game,
max_depth=self.max_depth)
action = move[0]
if test:
self.action = action
else:
gui.enqueue_action(action)
gui.trig()
# TODO This is only for testing!
def get_action(self, game, gui):
return self.action
|
Use a setter for max_depth
|
Use a setter for max_depth
|
Python
|
mit
|
cropleyb/pentai,cropleyb/pentai,cropleyb/pentai
|
---
+++
@@ -9,14 +9,12 @@
class AIPlayer(Player):
""" Yes there is a circular dependancy between AIPlayer and Game """
- def __init__(self, max_depth, *args, **vargs):
+ def __init__(self, *args, **vargs):
Player.__init__(self, *args, **vargs)
+ self.max_depth = 1
+
+ def set_max_depth(self, max_depth):
self.max_depth = max_depth
- '''
- #TODO use super?
- def __init__(self, max_depth, *args, **kwargs):
- super(AIPlayer, self).__init__(*args, **kwargs)
- '''
def attach_to_game(self, base_game):
self.ab_game = ab_bridge.ABGame(base_game)
|
e0aba854c685610af085da91e25bb3682f20461b
|
setup.py
|
setup.py
|
import setuptools
import codecs
import os.path
# Used to read the file
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, rel_path), 'r') as fp:
return fp.read()
# Used to extract out the __version__
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith('__version__'):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
# Used to read the readme file
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="NearBeach",
version=get_version('NearBeach/__init__.py'),
author="Luke Christopher Clarke",
author_email="luke@nearbeach.org",
description="NearBeach - an open source project management tool",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/robotichead/NearBeach",
packages=setuptools.find_packages(),
install_requires=[
'django',
'simplejson',
'pillow',
'urllib3',
],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
include_package_data=True,
)
|
import setuptools
import codecs
import os.path
# Used to read the file
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, rel_path), 'r') as fp:
return fp.read()
# Used to extract out the __version__
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith('__version__'):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
# Used to read the readme file
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="NearBeach",
version=get_version('NearBeach/__init__.py'),
author="Luke Christopher Clarke",
author_email="luke@nearbeach.org",
description="NearBeach - an open source project management tool",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/robotichead/NearBeach",
packages=setuptools.find_packages(),
install_requires=[
'django',
'simplejson',
'pillow',
'urllib3',
'boto3',
],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
include_package_data=True,
)
|
Fix Installation Issues -> missing Boto3
|
Fix Installation Issues -> missing Boto3
|
Python
|
mit
|
robotichead/NearBeach,robotichead/NearBeach,robotichead/NearBeach
|
---
+++
@@ -36,6 +36,7 @@
'simplejson',
'pillow',
'urllib3',
+ 'boto3',
],
classifiers=[
"Programming Language :: Python :: 3",
|
68f07070c829fa2703167e8f1c8161d364e3b800
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='devito',
version='2.0.1',
description="""Finite Difference DSL for symbolic stencil computation.""",
author="Imperial College London",
license='MIT',
packages=['devito'])
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='devito',
version='2.0.1',
description="Finite Difference DSL for symbolic computation.",
long_descritpion="""Devito is a new tool for performing
optimised Finite Difference (FD) computation from high-level
symbolic problem definitions. Devito performs automated code
generation and Just-In-time (JIT) compilation based on symbolic
equations defined in SymPy to create and execute highly
optimised Finite Difference kernels on multiple computer
platforms.""",
url='http://www.opesci.org/devito',
author="Imperial College London",
author_email='opesci@imperial.ac.uk',
license='MIT',
packages=['devito'],
install_requires=['numpy', 'sympy', 'mpmath', 'cgen', 'codepy'],
test_requires=['pytest', 'flake8', 'isort'])
|
Add more install meta-data, including dependency packages
|
Pip: Add more install meta-data, including dependency packages
|
Python
|
mit
|
opesci/devito,opesci/devito
|
---
+++
@@ -5,7 +5,18 @@
setup(name='devito',
version='2.0.1',
- description="""Finite Difference DSL for symbolic stencil computation.""",
+ description="Finite Difference DSL for symbolic computation.",
+ long_descritpion="""Devito is a new tool for performing
+ optimised Finite Difference (FD) computation from high-level
+ symbolic problem definitions. Devito performs automated code
+ generation and Just-In-time (JIT) compilation based on symbolic
+ equations defined in SymPy to create and execute highly
+ optimised Finite Difference kernels on multiple computer
+ platforms.""",
+ url='http://www.opesci.org/devito',
author="Imperial College London",
+ author_email='opesci@imperial.ac.uk',
license='MIT',
- packages=['devito'])
+ packages=['devito'],
+ install_requires=['numpy', 'sympy', 'mpmath', 'cgen', 'codepy'],
+ test_requires=['pytest', 'flake8', 'isort'])
|
89493c1249a5233cf8efb5be965ce427cdd5280a
|
setup.py
|
setup.py
|
"""vcrpy-unittest setup module based on
https://github.com/pypa/sampleproject
"""
from __future__ import absolute_import, unicode_literals
from setuptools import setup, find_packages
from codecs import open
import os
here = os.path.dirname(__file__)
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='vcrpy-unittest',
version='0.1.2',
description='Python unittest integration for vcr.py',
long_description=long_description,
url='https://github.com/agriffis/vcrpy-unittest',
author='Aron Griffis',
author_email='aron@arongriffis.com',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='vcrpy vcr.py unittest testing mock http'.split(),
packages=find_packages(exclude=['tests']),
install_requires=['vcrpy'],
)
|
"""vcrpy-unittest setup module based on
https://github.com/pypa/sampleproject
"""
from __future__ import absolute_import, unicode_literals
from setuptools import setup, find_packages
from codecs import open
import os
here = os.path.dirname(__file__)
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='vcrpy-unittest',
version='0.1.3',
description='Python unittest integration for vcr.py',
long_description=long_description,
url='https://github.com/agriffis/vcrpy-unittest',
author='Aron Griffis',
author_email='aron@arongriffis.com',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='vcrpy vcr.py unittest testing mock http'.split(),
packages=find_packages(exclude=['tests']),
install_requires=['vcrpy'],
)
|
Bump version for another PyPI release, this time to remove accidental vendorized vcr.py
|
Bump version for another PyPI release, this time to remove accidental vendorized vcr.py
|
Python
|
mit
|
agriffis/vcrpy-unittest
|
---
+++
@@ -13,7 +13,7 @@
setup(
name='vcrpy-unittest',
- version='0.1.2',
+ version='0.1.3',
description='Python unittest integration for vcr.py',
long_description=long_description,
url='https://github.com/agriffis/vcrpy-unittest',
|
fb4bac2a228a196359317f338c3f1e6643c3837d
|
nova/tests/unit/compute/fake_resource_tracker.py
|
nova/tests/unit/compute/fake_resource_tracker.py
|
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.compute import resource_tracker
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
def _create(self, context, values):
self._write_ext_resources(values)
self.compute_node = values
self.compute_node['id'] = 1
def _update(self, context):
self._write_ext_resources(self.compute_node)
|
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.compute import resource_tracker
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
def _update(self, context):
self._write_ext_resources(self.compute_node)
|
Remove an unused method in FakeResourceTracker
|
Remove an unused method in FakeResourceTracker
Nothing calls _create and there is no _create in the super class for
this to be overriding.
Change-Id: Ic41f2d249b9aaffb2caaa18dd492924a4ceb3653
|
Python
|
apache-2.0
|
gooddata/openstack-nova,Juniper/nova,mikalstill/nova,cernops/nova,cernops/nova,vmturbo/nova,NeCTAR-RC/nova,klmitch/nova,jianghuaw/nova,mikalstill/nova,klmitch/nova,Juniper/nova,cloudbase/nova,cernops/nova,openstack/nova,rahulunair/nova,gooddata/openstack-nova,jianghuaw/nova,klmitch/nova,NeCTAR-RC/nova,mikalstill/nova,hanlind/nova,rajalokan/nova,vmturbo/nova,jianghuaw/nova,cyx1231st/nova,openstack/nova,bigswitch/nova,phenoxim/nova,sebrandon1/nova,gooddata/openstack-nova,gooddata/openstack-nova,mahak/nova,zhimin711/nova,mahak/nova,cloudbase/nova,alaski/nova,BeyondTheClouds/nova,zhimin711/nova,alaski/nova,vmturbo/nova,hanlind/nova,openstack/nova,rajalokan/nova,CEG-FYP-OpenStack/scheduler,sebrandon1/nova,BeyondTheClouds/nova,bigswitch/nova,rajalokan/nova,vmturbo/nova,Juniper/nova,phenoxim/nova,cloudbase/nova,cyx1231st/nova,sebrandon1/nova,mahak/nova,CEG-FYP-OpenStack/scheduler,BeyondTheClouds/nova,klmitch/nova,rahulunair/nova,hanlind/nova,jianghuaw/nova,rahulunair/nova,rajalokan/nova,Juniper/nova
|
---
+++
@@ -19,10 +19,5 @@
class FakeResourceTracker(resource_tracker.ResourceTracker):
"""Version without a DB requirement."""
- def _create(self, context, values):
- self._write_ext_resources(values)
- self.compute_node = values
- self.compute_node['id'] = 1
-
def _update(self, context):
self._write_ext_resources(self.compute_node)
|
2e406c8cca9e55c9b8e2dcbf33005aa580ef74ea
|
tests/state/test_in_memory_key_value_store.py
|
tests/state/test_in_memory_key_value_store.py
|
from winton_kafka_streams.state.in_memory_key_value_store import InMemoryKeyValueStore
def test_inMemoryKeyValueStore():
store = InMemoryKeyValueStore('teststore')
store['a'] = 1
assert store['a'] == 1
store['a'] = 2
assert store['a'] == 2
|
import pytest
from winton_kafka_streams.state.in_memory_key_value_store import InMemoryKeyValueStore
def test_inMemoryKeyValueStore():
store = InMemoryKeyValueStore('teststore')
store['a'] = 1
assert store['a'] == 1
store['a'] = 2
assert store['a'] == 2
del store['a']
assert store.get('a') is None
with pytest.raises(KeyError):
store['a']
|
Test behaviour of key deletion
|
Test behaviour of key deletion
|
Python
|
apache-2.0
|
wintoncode/winton-kafka-streams
|
---
+++
@@ -1,3 +1,4 @@
+import pytest
from winton_kafka_streams.state.in_memory_key_value_store import InMemoryKeyValueStore
@@ -9,3 +10,8 @@
store['a'] = 2
assert store['a'] == 2
+
+ del store['a']
+ assert store.get('a') is None
+ with pytest.raises(KeyError):
+ store['a']
|
8b59148347f3009bb082f8fff3d11b04c3048433
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import setuptools
setuptools.setup(
name='requires-provides',
version='0.1',
description='Flexible dependency decorators',
license='MIT',
author='Joshua Downer',
author_email='joshua.downer@gmail.com',
url='http://github.com/jdowner/requires-provides',
keywords='python decorator dependencies requires provides',
packages=['dependency'],
package_data={
'': ['*.rst', 'LICENSE'],
},
install_requires=[
'pep8',
'tox',
],
platforms=['Unix'],
test_suite="tests",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
]
)
|
#!/usr/bin/env python
import setuptools
setuptools.setup(
name='requires-provides',
version='0.1',
description='Flexible dependency decorators',
license='MIT',
author='Joshua Downer',
author_email='joshua.downer@gmail.com',
url='http://github.com/jdowner/requires-provides',
keywords='python decorator dependencies requires provides',
packages=['dependency'],
package_data={
'': ['*.rst', 'LICENSE'],
},
extra_requires=[
'pep8',
'tox',
],
platforms=['Unix'],
test_suite="tests",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development',
]
)
|
Remove tox and pep8 from "install" requires.
|
Remove tox and pep8 from "install" requires.
https://github.com/jdowner/requires-provides/issues/2
|
Python
|
mit
|
jdowner/requires-provides
|
---
+++
@@ -16,7 +16,7 @@
package_data={
'': ['*.rst', 'LICENSE'],
},
- install_requires=[
+ extra_requires=[
'pep8',
'tox',
],
|
493dc021322bd7ecf516535d9fef444376d96e4e
|
files/views.py
|
files/views.py
|
from django.shortcuts import render
from .models import Image
from .forms import ImageForm
from django.views.generic import CreateView, DeleteView, UpdateView, ListView, View
from django.shortcuts import redirect, get_object_or_404
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import PermissionRequiredMixin
class ImageDeleteView(PermissionRequiredMixin, DeleteView):
model = Image
success_url = '/files/images'
permission_required = "files.delete_image"
class ImageListView(PermissionRequiredMixin, ListView):
queryset = Image.objects.order_by('category', '-time')
template_name = 'files/images.html'
permission_required = 'files.view_image'
context_object_name = 'images'
class ImageView(PermissionRequiredMixin, View):
permission_required = "files.view_image"
def get(self, request, *args, **kwargs):
image = get_object_or_404(Image, pk=kwargs['pk'])
return HttpResponseRedirect('/media/'+str(image.file))
@login_required()
def imageUpload(request):
if request.method == 'POST':
form = ImageForm(request.POST, request.FILES, prefix='img')
if form.is_valid():
image = form.save(commit=False)
image.save()
return render(request, 'files/single-image.html', {'image':image})
else:
return HttpResponse(form.errors)
else:
return HttpResponseRedirect('/')
|
from django.shortcuts import render
from .models import Image
from .forms import ImageForm
from django.views.generic import CreateView, DeleteView, UpdateView, ListView, View
from django.shortcuts import redirect, get_object_or_404
from django.http import HttpResponseRedirect, HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import PermissionRequiredMixin
class ImageDeleteView(PermissionRequiredMixin, DeleteView):
model = Image
success_url = '/files/images'
permission_required = "files.delete_image"
class ImageListView(PermissionRequiredMixin, ListView):
queryset = Image.objects.order_by('img_category', '-time')
template_name = 'files/images.html'
permission_required = 'files.view_image'
context_object_name = 'images'
class ImageView(PermissionRequiredMixin, View):
permission_required = "files.view_image"
def get(self, request, *args, **kwargs):
image = get_object_or_404(Image, pk=kwargs['pk'])
return HttpResponseRedirect('/media/'+str(image.file))
@login_required()
def imageUpload(request):
if request.method == 'POST':
form = ImageForm(request.POST, request.FILES, prefix='img')
if form.is_valid():
image = form.save(commit=False)
image.save()
return render(request, 'files/single-image.html', {'image':image})
else:
return HttpResponse(form.errors)
else:
return HttpResponseRedirect('/')
|
Fix image grouping in gallery view
|
Fix image grouping in gallery view
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
---
+++
@@ -14,7 +14,7 @@
permission_required = "files.delete_image"
class ImageListView(PermissionRequiredMixin, ListView):
- queryset = Image.objects.order_by('category', '-time')
+ queryset = Image.objects.order_by('img_category', '-time')
template_name = 'files/images.html'
permission_required = 'files.view_image'
context_object_name = 'images'
|
776a7b25aa5fbb34b3b2597cd98a814147e97415
|
web.py
|
web.py
|
from flask import Flask, render_template, abort, url_for
from path import path
import settings
import lib
from rq import Connection, Queue
from redis import Redis
flapp = Flask(__name__)
flapp.debug = settings.DEBUG
redis_conn = Redis()
q = Queue(connection=redis_conn)
@flapp.route("/")
def hello():
files = settings.APPS_DIR.listdir('*.app')
apps = map(lambda app_file : lib.json_to_app(app_file), files)
return render_template('hello.html', apps=apps)
@flapp.route("/app/<app_id>/")
def show_app(app_id):
app = lib.app_id_to_data(app_id)
return render_template('app.html', app=app)
@flapp.route("/app/<app_id>/deploy/", methods=['GET', 'POST'])
def deploy_app(app_id):
lib.app_exists(app_id)
job = q.enqueue(lib.deploy_app, app_id)
return ("Deployment added in queue, should be ok soon.<br>"
+'<a href=".">Go back to app</a>')
if __name__ == "__main__":
flapp.run()
|
from flask import Flask, render_template, request
import settings
import lib
from rq import Queue
from redis import Redis
import json
flapp = Flask(__name__)
flapp.debug = settings.DEBUG
redis_conn = Redis()
q = Queue(connection=redis_conn)
@flapp.route("/")
def hello():
files = settings.APPS_DIR.listdir('*.app')
apps = map(lambda app_file: lib.json_to_app(app_file), files)
return render_template('hello.html', apps=apps)
@flapp.route("/app/<app_id>/")
def show_app(app_id):
app = lib.app_id_to_data(app_id)
return render_template('app.html', app=app)
@flapp.route("/app/<app_id>/deploy/", methods=['GET', 'POST'])
def deploy_app(app_id):
event_type = request.headers.get('X-GitHub-Event', 'manual')
if event_type not in ('push', 'manual'):
return "Not a usefull event X-GitHub-Event : {}".format(event_type)
if event_type == 'push':
try:
data = json.loads(request.data)
if data['ref'].split('/')[-1] != "master":
return "Not a usefull branch : {}".format(data['ref'])
except (json.JSONDecodeError, KeyError) as e:
print(e)
lib.app_exists(app_id)
q.enqueue(lib.deploy_app, app_id)
return 'Deployment added in queue, should be ok soon.<br> <a href=".">Go back to app</a>'
if __name__ == "__main__":
flapp.run()
|
Deploy only for master ?
|
Deploy only for master ?
|
Python
|
agpl-3.0
|
C4ptainCrunch/click-and-deploy,C4ptainCrunch/click-and-deploy
|
---
+++
@@ -1,9 +1,9 @@
-from flask import Flask, render_template, abort, url_for
-from path import path
+from flask import Flask, render_template, request
import settings
import lib
-from rq import Connection, Queue
+from rq import Queue
from redis import Redis
+import json
flapp = Flask(__name__)
flapp.debug = settings.DEBUG
@@ -16,20 +16,31 @@
@flapp.route("/")
def hello():
files = settings.APPS_DIR.listdir('*.app')
- apps = map(lambda app_file : lib.json_to_app(app_file), files)
+ apps = map(lambda app_file: lib.json_to_app(app_file), files)
return render_template('hello.html', apps=apps)
+
@flapp.route("/app/<app_id>/")
def show_app(app_id):
app = lib.app_id_to_data(app_id)
return render_template('app.html', app=app)
+
@flapp.route("/app/<app_id>/deploy/", methods=['GET', 'POST'])
def deploy_app(app_id):
+ event_type = request.headers.get('X-GitHub-Event', 'manual')
+ if event_type not in ('push', 'manual'):
+ return "Not a usefull event X-GitHub-Event : {}".format(event_type)
+ if event_type == 'push':
+ try:
+ data = json.loads(request.data)
+ if data['ref'].split('/')[-1] != "master":
+ return "Not a usefull branch : {}".format(data['ref'])
+ except (json.JSONDecodeError, KeyError) as e:
+ print(e)
lib.app_exists(app_id)
- job = q.enqueue(lib.deploy_app, app_id)
- return ("Deployment added in queue, should be ok soon.<br>"
- +'<a href=".">Go back to app</a>')
+ q.enqueue(lib.deploy_app, app_id)
+ return 'Deployment added in queue, should be ok soon.<br> <a href=".">Go back to app</a>'
if __name__ == "__main__":
|
12525b5e58752df67fc5bc063aaf4a5996c2bf21
|
example_storage.py
|
example_storage.py
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pprint import pprint
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver
CloudFiles = get_driver(Provider.CloudFiles)
driver = CloudFiles('access key id', 'secret key')
containers = driver.list_containers()
container_objects = driver.list_container_objects(containers[0])
pprint(containers)
pprint(container_objects)
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pprint import pprint
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver
CloudFiles = get_driver(Provider.CLOUDFILES)
driver = CloudFiles('access key id', 'secret key')
containers = driver.list_containers()
container_objects = driver.list_container_objects(containers[0])
pprint(containers)
pprint(container_objects)
|
Fix provider constant in the example.
|
Fix provider constant in the example.
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@1090630 13f79535-47bb-0310-9956-ffa450edef68
|
Python
|
apache-2.0
|
munkiat/libcloud,sfriesel/libcloud,wrigri/libcloud,JamesGuthrie/libcloud,mbrukman/libcloud,aleGpereira/libcloud,t-tran/libcloud,Itxaka/libcloud,briancurtin/libcloud,pquentin/libcloud,sgammon/libcloud,jimbobhickville/libcloud,Cloud-Elasticity-Services/as-libcloud,apache/libcloud,cryptickp/libcloud,watermelo/libcloud,apache/libcloud,andrewsomething/libcloud,mtekel/libcloud,smaffulli/libcloud,carletes/libcloud,atsaki/libcloud,wrigri/libcloud,iPlantCollaborativeOpenSource/libcloud,ZuluPro/libcloud,aleGpereira/libcloud,dcorbacho/libcloud,kater169/libcloud,sfriesel/libcloud,thesquelched/libcloud,ByteInternet/libcloud,niteoweb/libcloud,thesquelched/libcloud,vongazman/libcloud,watermelo/libcloud,wuyuewen/libcloud,techhat/libcloud,Kami/libcloud,wido/libcloud,curoverse/libcloud,supertom/libcloud,StackPointCloud/libcloud,Verizon/libcloud,Keisuke69/libcloud,Jc2k/libcloud,aleGpereira/libcloud,t-tran/libcloud,Scalr/libcloud,andrewsomething/libcloud,mtekel/libcloud,techhat/libcloud,DimensionDataCBUSydney/libcloud,sahildua2305/libcloud,erjohnso/libcloud,pantheon-systems/libcloud,mgogoulos/libcloud,ByteInternet/libcloud,StackPointCloud/libcloud,Jc2k/libcloud,samuelchong/libcloud,ninefold/libcloud,SecurityCompass/libcloud,wuyuewen/libcloud,iPlantCollaborativeOpenSource/libcloud,pquentin/libcloud,NexusIS/libcloud,Itxaka/libcloud,ZuluPro/libcloud,mistio/libcloud,aviweit/libcloud,pantheon-systems/libcloud,wuyuewen/libcloud,smaffulli/libcloud,mathspace/libcloud,ClusterHQ/libcloud,curoverse/libcloud,smaffulli/libcloud,jerryblakley/libcloud,mathspace/libcloud,sahildua2305/libcloud,MrBasset/libcloud,munkiat/libcloud,mbrukman/libcloud,niteoweb/libcloud,mistio/libcloud,StackPointCloud/libcloud,Keisuke69/libcloud,cloudControl/libcloud,Itxaka/libcloud,atsaki/libcloud,niteoweb/libcloud,Scalr/libcloud,marcinzaremba/libcloud,ZuluPro/libcloud,erjohnso/libcloud,thesquelched/libcloud,jerryblakley/libcloud,NexusIS/libcloud,jerryblakley/libcloud,Scalr/libcloud,kater169/libcloud,Cloud-Elasticity-Service
s/as-libcloud,NexusIS/libcloud,briancurtin/libcloud,MrBasset/libcloud,JamesGuthrie/libcloud,jimbobhickville/libcloud,vongazman/libcloud,lochiiconnectivity/libcloud,MrBasset/libcloud,cloudControl/libcloud,mgogoulos/libcloud,mtekel/libcloud,apache/libcloud,aviweit/libcloud,supertom/libcloud,sergiorua/libcloud,schaubl/libcloud,cloudControl/libcloud,sahildua2305/libcloud,lochiiconnectivity/libcloud,pquentin/libcloud,cryptickp/libcloud,samuelchong/libcloud,Verizon/libcloud,cryptickp/libcloud,curoverse/libcloud,sergiorua/libcloud,marcinzaremba/libcloud,illfelder/libcloud,carletes/libcloud,mgogoulos/libcloud,mathspace/libcloud,kater169/libcloud,vongazman/libcloud,andrewsomething/libcloud,mbrukman/libcloud,Cloud-Elasticity-Services/as-libcloud,wido/libcloud,munkiat/libcloud,illfelder/libcloud,jimbobhickville/libcloud,erjohnso/libcloud,supertom/libcloud,t-tran/libcloud,lochiiconnectivity/libcloud,atsaki/libcloud,ClusterHQ/libcloud,sgammon/libcloud,Verizon/libcloud,wido/libcloud,wrigri/libcloud,schaubl/libcloud,dcorbacho/libcloud,watermelo/libcloud,iPlantCollaborativeOpenSource/libcloud,sergiorua/libcloud,carletes/libcloud,marcinzaremba/libcloud,Kami/libcloud,SecurityCompass/libcloud,ByteInternet/libcloud,sfriesel/libcloud,SecurityCompass/libcloud,aviweit/libcloud,DimensionDataCBUSydney/libcloud,DimensionDataCBUSydney/libcloud,JamesGuthrie/libcloud,pantheon-systems/libcloud,Kami/libcloud,mistio/libcloud,illfelder/libcloud,ninefold/libcloud,briancurtin/libcloud,dcorbacho/libcloud,techhat/libcloud,samuelchong/libcloud,schaubl/libcloud
|
---
+++
@@ -18,7 +18,7 @@
from libcloud.storage.types import Provider
from libcloud.storage.providers import get_driver
-CloudFiles = get_driver(Provider.CloudFiles)
+CloudFiles = get_driver(Provider.CLOUDFILES)
driver = CloudFiles('access key id', 'secret key')
|
237faf53129e575faafad6cfeecf96c707d50c4b
|
examples/common.py
|
examples/common.py
|
def print_devices(b):
for device in sorted(b.devices, key=lambda d: len(d.ancestors)):
print(device) # this is a blivet.devices.StorageDevice instance
print()
|
def print_devices(b):
print(b.devicetree)
|
Use DeviceTree.__str__ when printing devices in examples.
|
Use DeviceTree.__str__ when printing devices in examples.
|
Python
|
lgpl-2.1
|
AdamWill/blivet,vojtechtrefny/blivet,rhinstaller/blivet,vpodzime/blivet,vojtechtrefny/blivet,AdamWill/blivet,rvykydal/blivet,rvykydal/blivet,jkonecny12/blivet,rhinstaller/blivet,jkonecny12/blivet,vpodzime/blivet
|
---
+++
@@ -1,6 +1,3 @@
def print_devices(b):
- for device in sorted(b.devices, key=lambda d: len(d.ancestors)):
- print(device) # this is a blivet.devices.StorageDevice instance
-
- print()
+ print(b.devicetree)
|
a51bc09ec5c7c03ce0d76cca443520036d45fa63
|
apps/projects/urls.py
|
apps/projects/urls.py
|
from django.conf.urls.defaults import patterns, url, include
from surlex.dj import surl
from .api import ProjectResource, ProjectDetailResource
from .views import ProjectListView, ProjectDetailView, ProjectMapView
project_resource = ProjectResource()
projectdetail_resource = ProjectDetailResource()
urlpatterns = patterns('apps.projects.views',
surl(r'^$', ProjectListView.as_view(), name='project_list'),
surl(r'^<slug:s>/$', ProjectDetailView.as_view(), name='project_detail'),
surl(r'^<slug:s>/map/$', ProjectMapView.as_view(), name='project_map'),
)
# API urls
urlpatterns += patterns('',
url(r'^api/', include(project_resource.urls)),
url(r'^api/', include(projectdetail_resource.urls)),
)
|
from django.conf.urls.defaults import patterns, url, include
from surlex.dj import surl
from .api import ProjectResource, ProjectDetailResource, ProjectSearchFormResource
from .views import ProjectListView, ProjectDetailView, ProjectMapView
project_resource = ProjectResource()
projectdetail_resource = ProjectDetailResource()
projectsearchform_resource = ProjectSearchFormResource()
urlpatterns = patterns('apps.projects.views',
surl(r'^$', ProjectListView.as_view(), name='project_list'),
surl(r'^<slug:s>/$', ProjectDetailView.as_view(), name='project_detail'),
surl(r'^<slug:s>/map/$', ProjectMapView.as_view(), name='project_map'),
)
# API urls
urlpatterns += patterns('',
url(r'^api/', include(project_resource.urls)),
url(r'^api/', include(projectdetail_resource.urls)),
url(r'^api/', include(projectsearchform_resource.urls)),
)
|
Enable project search form API.
|
Enable project search form API.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
---
+++
@@ -2,11 +2,12 @@
from surlex.dj import surl
-from .api import ProjectResource, ProjectDetailResource
+from .api import ProjectResource, ProjectDetailResource, ProjectSearchFormResource
from .views import ProjectListView, ProjectDetailView, ProjectMapView
project_resource = ProjectResource()
projectdetail_resource = ProjectDetailResource()
+projectsearchform_resource = ProjectSearchFormResource()
urlpatterns = patterns('apps.projects.views',
@@ -19,5 +20,6 @@
urlpatterns += patterns('',
url(r'^api/', include(project_resource.urls)),
url(r'^api/', include(projectdetail_resource.urls)),
+ url(r'^api/', include(projectsearchform_resource.urls)),
)
|
e2a55bf686679c39304645e3c73cc4ba353f5018
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='cmsplugin_filery',
version=".".join(map(str, __import__('cmsplugin_filery').__version__)),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
description = 'Image gallery based on django-filer',
keywords=[
'django',
'django-cms',
'web',
'cms',
'cmsplugin',
'plugin',
'image',
'gallery',
],
packages=['cmsplugin_filery',],
package_dir={'cmsplugin_filery': 'cmsplugin_filery'},
package_data={'cmsplugin_filery': ['templates/*/*']},
provides=['cmsplugin_filery'],
include_package_data=True,
install_requires = ['django-inline-ordering>=0.1.1', 'easy-thumbnails',
'django-filer']
)
|
from distutils.core import setup
setup(
name='cmsplugin-filery',
version=".".join(map(str, __import__('cmsplugin_filery').__version__)),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
description = 'Image gallery based on django-filer',
keywords=[
'django',
'django-cms',
'web',
'cms',
'cmsplugin',
'plugin',
'image',
'gallery',
],
packages=['cmsplugin_filery',],
package_dir={'cmsplugin_filery': 'cmsplugin_filery'},
package_data={'cmsplugin_filery': ['templates/*/*']},
provides=['cmsplugin_filery'],
include_package_data=True,
install_requires = ['django-inline-ordering>=0.1.1', 'easy-thumbnails',
'django-filer']
)
|
Correct the plugin name typo mistake
|
Correct the plugin name typo mistake
|
Python
|
bsd-2-clause
|
jasekz/cmsplugin-filery,Alir3z4/cmsplugin-filery
|
---
+++
@@ -1,7 +1,7 @@
from distutils.core import setup
setup(
- name='cmsplugin_filery',
+ name='cmsplugin-filery',
version=".".join(map(str, __import__('cmsplugin_filery').__version__)),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
|
0607e619da2cf94a4d12c11b2b43ad3e21719dd4
|
queued_storage/signals.py
|
queued_storage/signals.py
|
"""
django-queued-storage ships with a signal fired after a file was transfered
by the Transfer task. It provides the name of the file, the local and the
remote storage backend instances as arguments to connected signal callbacks.
Imagine you'd want to post-process the file that has been transfered from
the local to the remote storage, e.g. add it to a log model to always know
what exactly happened. All you'd have to do is to connect a callback to
the ``file_transferred`` signal::
from django.dispatch import receiver
from django.utils.timezone import now
from queued_storage.signals import file_transferred
from mysite.transferlog.models import TransferLogEntry
@receiver(file_transferred)
def log_file_transferred(sender, name, local, remote, **kwargs):
remote_url = remote.url(name)
TransferLogEntry.objects.create(name=name, remote_url=remote_url, transfer_date=now())
# Alternatively, you can also use the signal's connect method to connect:
file_transferred.connect(log_file_transferred)
Note that this signal does **NOT** have access to the calling Model or even
the FileField instance that it relates to, only the name of the file.
As a result, this signal is somewhat limited and may only be of use if you
have a very specific usage of django-queued-storage.
"""
from django.dispatch import Signal
file_transferred = Signal(providing_args=["name", "local", "remote"])
|
"""
django-queued-storage ships with a signal fired after a file was transferred
by the Transfer task. It provides the name of the file, the local and the
remote storage backend instances as arguments to connected signal callbacks.
Imagine you'd want to post-process the file that has been transferred from
the local to the remote storage, e.g. add it to a log model to always know
what exactly happened. All you'd have to do is to connect a callback to
the ``file_transferred`` signal::
from django.dispatch import receiver
from django.utils.timezone import now
from queued_storage.signals import file_transferred
from mysite.transferlog.models import TransferLogEntry
@receiver(file_transferred)
def log_file_transferred(sender, name, local, remote, **kwargs):
remote_url = remote.url(name)
TransferLogEntry.objects.create(name=name, remote_url=remote_url, transfer_date=now())
# Alternatively, you can also use the signal's connect method to connect:
file_transferred.connect(log_file_transferred)
Note that this signal does **NOT** have access to the calling Model or even
the FileField instance that it relates to, only the name of the file.
As a result, this signal is somewhat limited and may only be of use if you
have a very specific usage of django-queued-storage.
"""
from django.dispatch import Signal
file_transferred = Signal(providing_args=["name", "local", "remote"])
|
Fix simple typo, transfered -> transferred
|
docs: Fix simple typo, transfered -> transferred
There is a small typo in queued_storage/signals.py.
Should read `transferred` rather than `transfered`.
|
Python
|
bsd-3-clause
|
jezdez/django-queued-storage,jazzband/django-queued-storage
|
---
+++
@@ -1,9 +1,9 @@
"""
-django-queued-storage ships with a signal fired after a file was transfered
+django-queued-storage ships with a signal fired after a file was transferred
by the Transfer task. It provides the name of the file, the local and the
remote storage backend instances as arguments to connected signal callbacks.
-Imagine you'd want to post-process the file that has been transfered from
+Imagine you'd want to post-process the file that has been transferred from
the local to the remote storage, e.g. add it to a log model to always know
what exactly happened. All you'd have to do is to connect a callback to
the ``file_transferred`` signal::
|
6ba28684960b14ecb29b26d63ae4a593337e7fa4
|
examples/wordy.py
|
examples/wordy.py
|
"""
Nodes can contain words
=======================
We here at **Daft** headquarters tend to put symbols (variable
names) in our graph nodes. But you don't have to if you don't
want to.
"""
from matplotlib import rc
rc("font", family="serif", size=12)
rc("text", usetex=True)
import daft
pgm = daft.PGM()
pgm.add_node("cloudy", r"cloudy", 3, 3, aspect=1.8)
pgm.add_node("rain", r"rain", 2, 2, aspect=1.2)
pgm.add_node("sprinkler", r"sprinkler", 4, 2, aspect=2.1)
pgm.add_node("wet", r"grass wet", 3, 1, aspect=2.4, observed=True)
pgm.add_edge("cloudy", "rain")
pgm.add_edge("cloudy", "sprinkler")
pgm.add_edge("rain", "wet")
pgm.add_edge("sprinkler", "wet")
pgm.render()
pgm.savefig("wordy.pdf")
pgm.savefig("wordy.png", dpi=150)
|
"""
Nodes can contain words
=======================
We here at **Daft** headquarters tend to put symbols (variable
names) in our graph nodes. But you don't have to if you don't
want to.
"""
from matplotlib import rc
rc("font", family="serif", size=12)
rc("text", usetex=True)
import daft
pgm = daft.PGM()
pgm.add_node("cloudy", r"cloudy", 3, 3, aspect=1.8)
pgm.add_node("rain", r"rain", 2, 2, aspect=1.2)
pgm.add_node("sprinkler", r"sprinkler", 4, 2, aspect=2.1)
pgm.add_node("wet", r"grass wet", 3, 1, aspect=2.4, observed=True)
pgm.add_edge("cloudy", "rain", label="65\%", xoffset=-.1, label_params={"rotation": 45})
pgm.add_edge("cloudy", "sprinkler", label="35\%", xoffset=.1, label_params={"rotation": -45})
pgm.add_edge("rain", "wet")
pgm.add_edge("sprinkler", "wet")
pgm.render()
pgm.savefig("wordy.pdf")
pgm.savefig("wordy.png", dpi=150)
|
Add edge label and rotation.
|
Add edge label and rotation.
|
Python
|
mit
|
dfm/daft
|
---
+++
@@ -19,8 +19,8 @@
pgm.add_node("rain", r"rain", 2, 2, aspect=1.2)
pgm.add_node("sprinkler", r"sprinkler", 4, 2, aspect=2.1)
pgm.add_node("wet", r"grass wet", 3, 1, aspect=2.4, observed=True)
-pgm.add_edge("cloudy", "rain")
-pgm.add_edge("cloudy", "sprinkler")
+pgm.add_edge("cloudy", "rain", label="65\%", xoffset=-.1, label_params={"rotation": 45})
+pgm.add_edge("cloudy", "sprinkler", label="35\%", xoffset=.1, label_params={"rotation": -45})
pgm.add_edge("rain", "wet")
pgm.add_edge("sprinkler", "wet")
|
49c83e0ef5ec7390a78e95dbc035b7d2808ec13e
|
feedback/tests.py
|
feedback/tests.py
|
# -*- coding: utf-8 -*-
from django.test import TestCase, client
from .models import Feedback
client = client.Client()
class FeedbackTest(TestCase):
def test_add_feedback(self):
before_add = Feedback.objects.count()
response = client.post('/feedback/add/', {
'name': 'Пандо Пандев',
'email': 'panda@panda.com',
'information': 'Тука се разхожда една панда по екрана'})
after_add = Feedback.objects.count()
self.assertEqual(response.status_code, 302)
self.assertEqual(before_add + 1, after_add)
def test_user_add_feedback(self):
pass
|
# -*- coding: utf-8 -*-
from django.test import TestCase, client
from .models import Feedback
client = client.Client()
class FeedbackTest(TestCase):
def test_add_feedback(self):
pass
# before_add = Feedback.objects.count()
# response = client.post('/feedback/add/', {
# 'name': 'Пандо Пандев',
# 'email': 'panda@panda.com',
# 'information': 'Тука се разхожда една панда по екрана'})
# after_add = Feedback.objects.count()
# self.assertEqual(response.status_code, 302)
# self.assertEqual(before_add + 1, after_add)
def test_user_add_feedback(self):
pass
|
Remove test for adding feedback
|
Remove test for adding feedback
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
---
+++
@@ -9,14 +9,15 @@
class FeedbackTest(TestCase):
def test_add_feedback(self):
- before_add = Feedback.objects.count()
- response = client.post('/feedback/add/', {
- 'name': 'Пандо Пандев',
- 'email': 'panda@panda.com',
- 'information': 'Тука се разхожда една панда по екрана'})
- after_add = Feedback.objects.count()
- self.assertEqual(response.status_code, 302)
- self.assertEqual(before_add + 1, after_add)
+ pass
+ # before_add = Feedback.objects.count()
+ # response = client.post('/feedback/add/', {
+ # 'name': 'Пандо Пандев',
+ # 'email': 'panda@panda.com',
+ # 'information': 'Тука се разхожда една панда по екрана'})
+ # after_add = Feedback.objects.count()
+ # self.assertEqual(response.status_code, 302)
+ # self.assertEqual(before_add + 1, after_add)
def test_user_add_feedback(self):
pass
|
1ead832009e87d8570202b71014c1dfadcaaee8f
|
autocloud/__init__.py
|
autocloud/__init__.py
|
# -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper cofig file under /etc/autocloud/')
config = ConfigParser.RawConfigParser()
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
|
# -*- coding: utf-8 -*-
import ConfigParser
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
raise Exception('Please add a proper config file under /etc/autocloud/')
config = ConfigParser.RawConfigParser()
config.read(name)
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.getboolean('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
VIRTUALBOX = config.getboolean('autocloud', 'virtualbox')
|
Fix typo, cofig => config
|
config: Fix typo, cofig => config
Signed-off-by: Vivek Anand <6cbec6cb1b0c30c91d3fca6c61ddeb9b64cef11c@gmail.com>
|
Python
|
agpl-3.0
|
kushaldas/autocloud,kushaldas/autocloud,kushaldas/autocloud,kushaldas/autocloud
|
---
+++
@@ -7,7 +7,7 @@
name = '/etc/autocloud/autocloud.cfg'
if not os.path.exists(name):
- raise Exception('Please add a proper cofig file under /etc/autocloud/')
+ raise Exception('Please add a proper config file under /etc/autocloud/')
config = ConfigParser.RawConfigParser()
config.read(name)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.