commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
5e0e0e792ca36b7a6daea3931e5333d5d6f28281
|
pygout/cmdline.py
|
pygout/cmdline.py
|
import sys
def main(argv=sys.argv):
raise NotImplementedError
|
import sys
import argparse
import pygments.styles
from pygout.application import find_apps
class _ListStyles(argparse.Action):
def __call__(self, parser, namespace, values, option_string):
styles = sorted(pygments.styles.get_all_styles())
parser.exit(0, '\n'.join(styles) + '\n')
class _ListApps(argparse.Action):
def __call__(self, parser, namespace, values, option_string):
apps = sorted(find_apps().keys())
parser.exit(0, '\n'.join(apps) + '\n')
def main(argv=sys.argv):
parser = argparse.ArgumentParser(
description='Generate editor color schemes')
parser.add_argument('--help-styles', nargs=0, action=_ListStyles,
help='Show available Pygments styles and exit')
parser.add_argument('--help-apps', nargs=0, action=_ListApps,
help='Show available applications and exit')
parser.add_argument('application', choices=find_apps(), metavar='app',
help='Target application')
group = parser.add_mutually_exclusive_group()
group.add_argument('-S', dest='pygments_style', metavar='STYLE',
choices=sorted(pygments.styles.get_all_styles()),
help='Use existing Pygments style')
group.add_argument('-f', dest='style', metavar='FILE',
type=argparse.FileType('r'),
choices=sorted(find_apps().keys()),
help='Use style definition file')
args = parser.parse_args()
print args
|
Add argument parsing for 'pygout' command
|
Add argument parsing for 'pygout' command
|
Python
|
bsd-3-clause
|
alanbriolat/PygOut
|
---
+++
@@ -1,5 +1,39 @@
import sys
+import argparse
+
+import pygments.styles
+
+from pygout.application import find_apps
+
+
+class _ListStyles(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string):
+ styles = sorted(pygments.styles.get_all_styles())
+ parser.exit(0, '\n'.join(styles) + '\n')
+
+
+class _ListApps(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string):
+ apps = sorted(find_apps().keys())
+ parser.exit(0, '\n'.join(apps) + '\n')
def main(argv=sys.argv):
- raise NotImplementedError
+ parser = argparse.ArgumentParser(
+ description='Generate editor color schemes')
+ parser.add_argument('--help-styles', nargs=0, action=_ListStyles,
+ help='Show available Pygments styles and exit')
+ parser.add_argument('--help-apps', nargs=0, action=_ListApps,
+ help='Show available applications and exit')
+ parser.add_argument('application', choices=find_apps(), metavar='app',
+ help='Target application')
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument('-S', dest='pygments_style', metavar='STYLE',
+ choices=sorted(pygments.styles.get_all_styles()),
+ help='Use existing Pygments style')
+ group.add_argument('-f', dest='style', metavar='FILE',
+ type=argparse.FileType('r'),
+ choices=sorted(find_apps().keys()),
+ help='Use style definition file')
+ args = parser.parse_args()
+ print args
|
95a2d0bb226d57c3aa6977d5d1ce78b014e21e1e
|
harvest/jobresource.py
|
harvest/jobresource.py
|
import json
from restless.dj import DjangoResource
from restless.resources import skip_prepare
from django.conf.urls import patterns, url
from harvest.models import Job
from harvest.jobstatemachine import JobStatemachine
from borg_utils.jobintervals import Triggered
class JobResource(DjangoResource):
def is_authenticated(self):
return self.request.user.is_authenticated() or True
@staticmethod
def urls():
return patterns('',
url(r'^/?$',JobResource.as_list(),name='api_job_create'),
)
@skip_prepare
def create(self):
job_batch_id = Triggered.instance().job_batch_id
resp = {"status":True, "message":{}}
result = None
for name in self.data.get('publishes') or []:
result = JobStatemachine.create_job_by_name(name,Triggered.instance(),job_batch_id)
if result[0]:
resp["message"][name] = "job id : {0}".format(result[1])
else:
resp["status"] = False
resp["message"][name] = result[1]
return resp
|
import json
from restless.dj import DjangoResource
from restless.resources import skip_prepare
from django.conf.urls import patterns, url
from harvest.models import Job
from harvest.jobstatemachine import JobStatemachine
from borg_utils.jobintervals import Triggered
class JobResource(DjangoResource):
def is_authenticated(self):
return self.request.user.is_authenticated()
@staticmethod
def urls():
return patterns('',
url(r'^/?$',JobResource.as_list(),name='api_job_create'),
)
@skip_prepare
def create(self):
job_batch_id = Triggered.instance().job_batch_id
resp = {"status":True, "message":{}}
result = None
for name in self.data.get('publishes') or []:
result = JobStatemachine.create_job_by_name(name,Triggered.instance(),job_batch_id)
if result[0]:
resp["message"][name] = "job id : {0}".format(result[1])
else:
resp["status"] = False
resp["message"][name] = result[1]
return resp
|
Enable authentication for rest api
|
Enable authentication for rest api
|
Python
|
bsd-3-clause
|
rockychen-dpaw/borgcollector,rockychen-dpaw/borgcollector,parksandwildlife/borgcollector,rockychen-dpaw/borgcollector,parksandwildlife/borgcollector,parksandwildlife/borgcollector
|
---
+++
@@ -12,7 +12,7 @@
class JobResource(DjangoResource):
def is_authenticated(self):
- return self.request.user.is_authenticated() or True
+ return self.request.user.is_authenticated()
@staticmethod
def urls():
|
76b47fec3b24410f875db96b3404c47d4c3634cb
|
sheepdog_tables/__init__.py
|
sheepdog_tables/__init__.py
|
__version__ = '1.2.0'
try:
from django.conf import settings
getattr(settings, 'dummy_attr', 'dummy_value')
_LOAD_PACKAGES = True
except:
# Just running sdist, we think
_LOAD_PACKAGES = False
if _LOAD_PACKAGES:
from mixins import (TablesMixin, EditTablesMixin, FilteredListView,
CSVTableMixin)
from column import ColumnURL, Column, DictColumn, FieldColumn
from table import Table, EditTable
|
__version__ = '1.2.0'
try:
from django.conf import settings
getattr(settings, 'dummy_attr', 'dummy_value')
_LOAD_PACKAGES = True
except:
# Just running sdist, we think
_LOAD_PACKAGES = False
if _LOAD_PACKAGES:
from mixins import TablesMixin, EditTablesMixin, FilteredListView
from column import ColumnURL, Column, DictColumn, FieldColumn
from table import Table, EditTable
|
Fix import error after removal of old csv table mixin
|
Fix import error after removal of old csv table mixin
|
Python
|
bsd-3-clause
|
SheepDogInc/sheepdog_tables,SheepDogInc/sheepdog_tables
|
---
+++
@@ -9,7 +9,6 @@
_LOAD_PACKAGES = False
if _LOAD_PACKAGES:
- from mixins import (TablesMixin, EditTablesMixin, FilteredListView,
- CSVTableMixin)
+ from mixins import TablesMixin, EditTablesMixin, FilteredListView
from column import ColumnURL, Column, DictColumn, FieldColumn
from table import Table, EditTable
|
660cd7526c1aad6632446b1af5ae286b5383b52c
|
tests/commands/load/test_load_cnv_report_cmd.py
|
tests/commands/load/test_load_cnv_report_cmd.py
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],)
assert "Path 'invalid-path' does not exist." in result.output
assert result.exit_code == 2
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],
)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],
)
assert "Path 'invalid-path' does not exist." in result.output
assert result.exit_code == 2
|
Fix code style issues with Black
|
Fix code style issues with Black
|
Python
|
bsd-3-clause
|
Clinical-Genomics/scout,Clinical-Genomics/scout,Clinical-Genomics/scout
|
---
+++
@@ -15,7 +15,10 @@
assert runner
# Test CLI function
- result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],)
+ result = runner.invoke(
+ cli,
+ ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],
+ )
assert "saved report to case!" in result.output
assert result.exit_code == 0
@@ -28,7 +31,10 @@
assert runner
# Test CLI function
- result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],)
+ result = runner.invoke(
+ cli,
+ ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],
+ )
assert "Path 'invalid-path' does not exist." in result.output
assert result.exit_code == 2
|
638ff83c99264fc8336b0a60e39400cafbcc643e
|
manage.py
|
manage.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import tornado.ioloop
import tornado.web
import tornado.autoreload
from tornado.options import parse_command_line, define, options
define('port', default=8888)
define('template_path', default='templates')
define('PROJECT_PATH', default=os.path.join(
os.path.abspath(os.path.dirname(__file__))))
settings = dict(
debug=True,
gzip=True,
template_path="{}/{}".format(options.PROJECT_PATH, options.template_path))
def main():
from urls import URLS
application = tornado.web.Application(URLS, **settings)
print "openmining.io server starting..."
def fn():
print "openmining.io before reloading..."
parse_command_line()
application.listen(options.port)
tornado.autoreload.add_reload_hook(fn)
tornado.autoreload.start()
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import tornado.ioloop
import tornado.web
import tornado.autoreload
from tornado.options import parse_command_line, define, options
define('port', default=8888)
define('template_path', default='templates')
define('PROJECT_PATH', default=os.path.join(
os.path.abspath(os.path.dirname(__file__))))
settings = dict(
debug=True,
gzip=True,
autoreload=True,
template_path="{}/{}".format(options.PROJECT_PATH, options.template_path))
def main():
from urls import URLS
application = tornado.web.Application(URLS, **settings)
print "openmining.io server starting..."
def fn():
print "openmining.io before reloading..."
parse_command_line()
application.listen(options.port)
tornado.autoreload.add_reload_hook(fn)
tornado.autoreload.start()
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
Add autoreload on settings default true
|
Add autoreload on settings default true
|
Python
|
mit
|
mlgruby/mining,seagoat/mining,seagoat/mining,avelino/mining,mlgruby/mining,jgabriellima/mining,mining/mining,chrisdamba/mining,mining/mining,mlgruby/mining,jgabriellima/mining,AndrzejR/mining,chrisdamba/mining,avelino/mining,AndrzejR/mining
|
---
+++
@@ -17,6 +17,7 @@
settings = dict(
debug=True,
gzip=True,
+ autoreload=True,
template_path="{}/{}".format(options.PROJECT_PATH, options.template_path))
|
51c5e4e1e670f52584c157330a9a3b910be92d57
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
#!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
SITE_ID=1,
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
Add SITE_ID to test settings since contrib.sites is in INSTALLED_APPS.
|
Add SITE_ID to test settings since contrib.sites is in INSTALLED_APPS.
|
Python
|
mit
|
simonluijk/django-localeurl,jmagnusson/django-localeurl
|
---
+++
@@ -14,6 +14,7 @@
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
+ SITE_ID=1,
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
|
94e68ff420ecb07ad830e213b38863bf34b7f85c
|
autocomplete_light/urls.py
|
autocomplete_light/urls.py
|
"""
An url to AutocompleteView.
autocomplete_light_autocomplete
Given a 'autocomplete' argument with the name of the autocomplete, this url
routes to AutocompleteView.
autocomplete_light_registry
Renders the autocomplete registry, good for debugging, requires being
authenticated as superuser.
"""
from django import VERSION
from .views import AutocompleteView, RegistryView
try:
from django.conf.urls import patterns, url
except ImportError:
# Django < 1.5
from django.conf.urls.defaults import patterns, url
urlpatterns = [
url(r'^(?P<autocomplete>[-\w]+)/$',
AutocompleteView.as_view(),
name='autocomplete_light_autocomplete'
),
url(r'^$',
RegistryView.as_view(),
name='autocomplete_light_registry'
),
]
if VERSION < (1, 9):
urlpatterns = patterns('', *urlpatterns)
|
"""
An url to AutocompleteView.
autocomplete_light_autocomplete
Given a 'autocomplete' argument with the name of the autocomplete, this url
routes to AutocompleteView.
autocomplete_light_registry
Renders the autocomplete registry, good for debugging, requires being
authenticated as superuser.
"""
from django import VERSION
from .views import AutocompleteView, RegistryView
if VERSION > (1, 9):
from django.conf.urls import url
else:
try:
from django.conf.urls import patterns, url
except ImportError:
# Django < 1.5
from django.conf.urls.defaults import patterns, url
urlpatterns = [
url(r'^(?P<autocomplete>[-\w]+)/$',
AutocompleteView.as_view(),
name='autocomplete_light_autocomplete'
),
url(r'^$',
RegistryView.as_view(),
name='autocomplete_light_registry'
),
]
if VERSION < (1, 9):
urlpatterns = patterns('', *urlpatterns)
|
Fix Django 1.9 import error
|
Fix Django 1.9 import error
|
Python
|
mit
|
shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,shubhamdipt/django-autocomplete-light,dsanders11/django-autocomplete-light,Perkville/django-autocomplete-light,Perkville/django-autocomplete-light,Eraldo/django-autocomplete-light,luzfcb/django-autocomplete-light,yourlabs/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,Perkville/django-autocomplete-light,shubhamdipt/django-autocomplete-light,yourlabs/django-autocomplete-light,Eraldo/django-autocomplete-light,Eraldo/django-autocomplete-light,yourlabs/django-autocomplete-light,dsanders11/django-autocomplete-light,Eraldo/django-autocomplete-light,luzfcb/django-autocomplete-light,Perkville/django-autocomplete-light
|
---
+++
@@ -13,11 +13,14 @@
from .views import AutocompleteView, RegistryView
-try:
- from django.conf.urls import patterns, url
-except ImportError:
- # Django < 1.5
- from django.conf.urls.defaults import patterns, url
+if VERSION > (1, 9):
+ from django.conf.urls import url
+else:
+ try:
+ from django.conf.urls import patterns, url
+ except ImportError:
+ # Django < 1.5
+ from django.conf.urls.defaults import patterns, url
urlpatterns = [
url(r'^(?P<autocomplete>[-\w]+)/$',
|
7e8823b59e7a41430fd5f7aad5481c3a9903b4ba
|
test/test_editor.py
|
test/test_editor.py
|
# -*- coding: utf-8 -*-
"""
This is a simple test script to that is meant to be run by Travis CI to ensure
everything works properly foreach bindings on each supported python
version (3.2, 3.3, 3.4).
It runs a QApplication and shows a QPythonCodeEdit for 500ms.
"""
import sys
from PyQt4 import QtCore, QtGui
from pyqode.core import client
from pyqode.python.editor import QPythonCodeEdit
import logging
logging.basicConfig(level=True)
def leave():
app = QtGui.QApplication.instance()
app.exit(0)
def test_editor():
app = QtGui.QApplication(sys.argv)
editor = QPythonCodeEdit()
editor.show()
editor.start_server()
editor.open_file(__file__)
QtCore.QTimer.singleShot(500, leave)
app.exec_()
client.stop_server()
del editor
del app
|
# -*- coding: utf-8 -*-
"""
This is a simple test script to that is meant to be run by Travis CI to ensure
everything works properly foreach bindings on each supported python
version (3.2, 3.3, 3.4).
It runs a QApplication and shows a QPythonCodeEdit for 500ms.
"""
import sys
from PyQt4 import QtCore, QtGui
from pyqode.core import client
from pyqode.python.editor import QPythonCodeEdit
import logging
logging.basicConfig(level=True)
def leave():
app = QtGui.QApplication.instance()
app.exit(0)
def test_editor():
app = QtGui.QApplication(sys.argv)
editor = QPythonCodeEdit()
editor.show()
editor.start_server()
editor.open_file(__file__)
QtCore.QTimer.singleShot(500, leave)
app.exec_()
client.stop_server(editor)
del editor
del app
|
Fix test, editor instance is required
|
Fix test, editor instance is required
|
Python
|
mit
|
pyQode/pyqode.python,mmolero/pyqode.python,zwadar/pyqode.python,pyQode/pyqode.python
|
---
+++
@@ -28,6 +28,6 @@
editor.open_file(__file__)
QtCore.QTimer.singleShot(500, leave)
app.exec_()
- client.stop_server()
+ client.stop_server(editor)
del editor
del app
|
d18ff01e737155eca2cb6c765291e6239328b003
|
scipy/lib/_numpy_compat.py
|
scipy/lib/_numpy_compat.py
|
"""Functions copypasted from newer versions of numpy.
"""
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from scipy.lib._version import NumpyVersion
if NumpyVersion(np.__version__) > '1.7.0.dev':
_assert_warns = np.testing.assert_warns
else:
def _assert_warns(warning_class, func, *args, **kw):
r"""
Fail unless the given callable throws the specified warning.
This definition is copypasted from numpy 1.9.0.dev.
The version in earlier numpy returns None.
Parameters
----------
warning_class : class
The class defining the warning that `func` is expected to throw.
func : callable
The callable to test.
*args : Arguments
Arguments passed to `func`.
**kwargs : Kwargs
Keyword arguments passed to `func`.
Returns
-------
The value returned by `func`.
"""
with warnings.catch_warnings(record=True) as l:
warnings.simplefilter('always')
result = func(*args, **kw)
if not len(l) > 0:
raise AssertionError("No warning raised when calling %s"
% func.__name__)
if not l[0].category is warning_class:
raise AssertionError("First warning for %s is not a "
"%s( is %s)" % (func.__name__, warning_class, l[0]))
return result
|
"""Functions copypasted from newer versions of numpy.
"""
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from scipy.lib._version import NumpyVersion
if NumpyVersion(np.__version__) > '1.7.0.dev':
_assert_warns = np.testing.assert_warns
else:
def _assert_warns(warning_class, func, *args, **kw):
r"""
Fail unless the given callable throws the specified warning.
This definition is copypasted from numpy 1.9.0.dev.
The version in earlier numpy returns None.
Parameters
----------
warning_class : class
The class defining the warning that `func` is expected to throw.
func : callable
The callable to test.
*args : Arguments
Arguments passed to `func`.
**kwargs : Kwargs
Keyword arguments passed to `func`.
Returns
-------
The value returned by `func`.
"""
with warnings.catch_warnings(record=True) as l:
warnings.simplefilter('always')
result = func(*args, **kw)
if not len(l) > 0:
raise AssertionError("No warning raised when calling %s"
% func.__name__)
if not l[0].category is warning_class:
raise AssertionError("First warning for %s is not a "
"%s( is %s)" % (func.__name__, warning_class, l[0]))
return result
if NumpyVersion(np.__version__) >= '1.6.0':
count_nonzero = np.count_nonzero
else:
def count_nonzero(a):
return (a != 0).sum()
|
Add a 'count_nonzero' function to use with numpy 1.5.1.
|
MAINT: Add a 'count_nonzero' function to use with numpy 1.5.1.
|
Python
|
bsd-3-clause
|
perimosocordiae/scipy,andyfaff/scipy,aman-iitj/scipy,jakevdp/scipy,dominicelse/scipy,zerothi/scipy,zxsted/scipy,matthewalbani/scipy,anielsen001/scipy,zxsted/scipy,WillieMaddox/scipy,endolith/scipy,cpaulik/scipy,anielsen001/scipy,woodscn/scipy,bkendzior/scipy,gef756/scipy,mdhaber/scipy,futurulus/scipy,Stefan-Endres/scipy,raoulbq/scipy,piyush0609/scipy,ilayn/scipy,cpaulik/scipy,behzadnouri/scipy,witcxc/scipy,piyush0609/scipy,scipy/scipy,lhilt/scipy,ortylp/scipy,Shaswat27/scipy,Dapid/scipy,Newman101/scipy,nvoron23/scipy,gef756/scipy,pnedunuri/scipy,pschella/scipy,surhudm/scipy,kalvdans/scipy,Gillu13/scipy,nmayorov/scipy,ales-erjavec/scipy,haudren/scipy,ChanderG/scipy,lukauskas/scipy,pizzathief/scipy,jonycgn/scipy,zerothi/scipy,njwilson23/scipy,sauliusl/scipy,giorgiop/scipy,vigna/scipy,jseabold/scipy,pyramania/scipy,kalvdans/scipy,aarchiba/scipy,grlee77/scipy,Dapid/scipy,gertingold/scipy,fredrikw/scipy,piyush0609/scipy,Newman101/scipy,zerothi/scipy,newemailjdm/scipy,ilayn/scipy,kleskjr/scipy,sriki18/scipy,mgaitan/scipy,rgommers/scipy,Newman101/scipy,witcxc/scipy,vhaasteren/scipy,Eric89GXL/scipy,haudren/scipy,Kamp9/scipy,sriki18/scipy,tylerjereddy/scipy,ortylp/scipy,lhilt/scipy,arokem/scipy,minhlongdo/scipy,nmayorov/scipy,dch312/scipy,bkendzior/scipy,gef756/scipy,mgaitan/scipy,chatcannon/scipy,sonnyhu/scipy,gdooper/scipy,pschella/scipy,sriki18/scipy,kalvdans/scipy,anielsen001/scipy,mikebenfield/scipy,jakevdp/scipy,jor-/scipy,newemailjdm/scipy,felipebetancur/scipy,FRidh/scipy,hainm/scipy,mhogg/scipy,kalvdans/scipy,jamestwebber/scipy,WillieMaddox/scipy,futurulus/scipy,mingwpy/scipy,Eric89GXL/scipy,lukauskas/scipy,ortylp/scipy,pizzathief/scipy,Eric89GXL/scipy,mtrbean/scipy,endolith/scipy,mortada/scipy,sauliusl/scipy,anielsen001/scipy,giorgiop/scipy,zerothi/scipy,newemailjdm/scipy,mortonjt/scipy,mortada/scipy,efiring/scipy,petebachant/scipy,larsmans/scipy,newemailjdm/scipy,petebachant/scipy,nvoron23/scipy,dch312/scipy,fredrikw/scipy,raoulbq/scipy,mortonjt/scipy,maciejkula/sc
ipy,niknow/scipy,vanpact/scipy,dch312/scipy,jseabold/scipy,zaxliu/scipy,mortonjt/scipy,mdhaber/scipy,jjhelmus/scipy,ilayn/scipy,mortonjt/scipy,grlee77/scipy,aman-iitj/scipy,ales-erjavec/scipy,zaxliu/scipy,mortada/scipy,mgaitan/scipy,petebachant/scipy,nvoron23/scipy,trankmichael/scipy,efiring/scipy,kleskjr/scipy,Dapid/scipy,sonnyhu/scipy,josephcslater/scipy,lhilt/scipy,Gillu13/scipy,WillieMaddox/scipy,perimosocordiae/scipy,nmayorov/scipy,zaxliu/scipy,argriffing/scipy,Shaswat27/scipy,ales-erjavec/scipy,ilayn/scipy,rmcgibbo/scipy,hainm/scipy,person142/scipy,mingwpy/scipy,niknow/scipy,josephcslater/scipy,jamestwebber/scipy,jsilter/scipy,endolith/scipy,giorgiop/scipy,pizzathief/scipy,chatcannon/scipy,befelix/scipy,lukauskas/scipy,richardotis/scipy,anntzer/scipy,Shaswat27/scipy,FRidh/scipy,rmcgibbo/scipy,pbrod/scipy,nvoron23/scipy,Srisai85/scipy,trankmichael/scipy,josephcslater/scipy,gertingold/scipy,Newman101/scipy,zaxliu/scipy,jor-/scipy,dominicelse/scipy,jakevdp/scipy,e-q/scipy,pbrod/scipy,dominicelse/scipy,behzadnouri/scipy,mtrbean/scipy,maniteja123/scipy,WarrenWeckesser/scipy,arokem/scipy,jonycgn/scipy,sonnyhu/scipy,woodscn/scipy,nonhermitian/scipy,apbard/scipy,trankmichael/scipy,andim/scipy,mikebenfield/scipy,fernand/scipy,Kamp9/scipy,maciejkula/scipy,witcxc/scipy,zxsted/scipy,andyfaff/scipy,dominicelse/scipy,mdhaber/scipy,zaxliu/scipy,lukauskas/scipy,josephcslater/scipy,woodscn/scipy,minhlongdo/scipy,andyfaff/scipy,ales-erjavec/scipy,maciejkula/scipy,chatcannon/scipy,FRidh/scipy,minhlongdo/scipy,dch312/scipy,pyramania/scipy,matthew-brett/scipy,gertingold/scipy,anielsen001/scipy,niknow/scipy,ilayn/scipy,vberaudi/scipy,befelix/scipy,surhudm/scipy,petebachant/scipy,giorgiop/scipy,gfyoung/scipy,befelix/scipy,sriki18/scipy,zxsted/scipy,raoulbq/scipy,surhudm/scipy,mortada/scipy,Shaswat27/scipy,andyfaff/scipy,chatcannon/scipy,vanpact/scipy,jor-/scipy,vhaasteren/scipy,ndchorley/scipy,lukauskas/scipy,gfyoung/scipy,pnedunuri/scipy,woodscn/scipy,vhaasteren/scipy,cpaulik/scipy
,andim/scipy,perimosocordiae/scipy,pizzathief/scipy,dominicelse/scipy,gef756/scipy,mtrbean/scipy,kalvdans/scipy,Gillu13/scipy,futurulus/scipy,surhudm/scipy,maciejkula/scipy,FRidh/scipy,rmcgibbo/scipy,cpaulik/scipy,richardotis/scipy,vigna/scipy,vberaudi/scipy,aeklant/scipy,WillieMaddox/scipy,aarchiba/scipy,anntzer/scipy,ChanderG/scipy,larsmans/scipy,aarchiba/scipy,andim/scipy,nmayorov/scipy,ndchorley/scipy,mgaitan/scipy,jsilter/scipy,jor-/scipy,endolith/scipy,aarchiba/scipy,befelix/scipy,bkendzior/scipy,Dapid/scipy,felipebetancur/scipy,mortada/scipy,jakevdp/scipy,newemailjdm/scipy,vigna/scipy,Srisai85/scipy,nonhermitian/scipy,sonnyhu/scipy,befelix/scipy,gertingold/scipy,mtrbean/scipy,andyfaff/scipy,nonhermitian/scipy,e-q/scipy,ndchorley/scipy,mdhaber/scipy,felipebetancur/scipy,Stefan-Endres/scipy,perimosocordiae/scipy,jjhelmus/scipy,nmayorov/scipy,scipy/scipy,pschella/scipy,Newman101/scipy,efiring/scipy,lhilt/scipy,apbard/scipy,WillieMaddox/scipy,njwilson23/scipy,Srisai85/scipy,FRidh/scipy,Srisai85/scipy,andim/scipy,felipebetancur/scipy,fernand/scipy,trankmichael/scipy,jjhelmus/scipy,person142/scipy,sauliusl/scipy,andim/scipy,aeklant/scipy,arokem/scipy,jseabold/scipy,perimosocordiae/scipy,aman-iitj/scipy,raoulbq/scipy,argriffing/scipy,petebachant/scipy,mikebenfield/scipy,jsilter/scipy,rgommers/scipy,futurulus/scipy,ndchorley/scipy,Stefan-Endres/scipy,aman-iitj/scipy,nvoron23/scipy,mortada/scipy,mingwpy/scipy,WarrenWeckesser/scipy,vhaasteren/scipy,pbrod/scipy,ortylp/scipy,scipy/scipy,ilayn/scipy,WarrenWeckesser/scipy,scipy/scipy,apbard/scipy,fernand/scipy,jonycgn/scipy,sonnyhu/scipy,ndchorley/scipy,ChanderG/scipy,matthewalbani/scipy,nonhermitian/scipy,zerothi/scipy,piyush0609/scipy,Shaswat27/scipy,fredrikw/scipy,jjhelmus/scipy,minhlongdo/scipy,Dapid/scipy,mortonjt/scipy,pyramania/scipy,vberaudi/scipy,piyush0609/scipy,FRidh/scipy,gdooper/scipy,WarrenWeckesser/scipy,argriffing/scipy,Gillu13/scipy,mingwpy/scipy,pbrod/scipy,mhogg/scipy,dch312/scipy,pnedunuri/scipy,niknow/
scipy,haudren/scipy,anntzer/scipy,vanpact/scipy,Kamp9/scipy,larsmans/scipy,grlee77/scipy,gef756/scipy,cpaulik/scipy,sauliusl/scipy,jakevdp/scipy,niknow/scipy,haudren/scipy,aeklant/scipy,cpaulik/scipy,gfyoung/scipy,kleskjr/scipy,giorgiop/scipy,pnedunuri/scipy,argriffing/scipy,maniteja123/scipy,pyramania/scipy,ChanderG/scipy,pnedunuri/scipy,zerothi/scipy,aeklant/scipy,jsilter/scipy,grlee77/scipy,jamestwebber/scipy,matthew-brett/scipy,behzadnouri/scipy,zxsted/scipy,Kamp9/scipy,person142/scipy,gfyoung/scipy,richardotis/scipy,witcxc/scipy,witcxc/scipy,sriki18/scipy,kleskjr/scipy,ChanderG/scipy,maniteja123/scipy,pizzathief/scipy,mingwpy/scipy,scipy/scipy,ortylp/scipy,andyfaff/scipy,vigna/scipy,sonnyhu/scipy,jonycgn/scipy,larsmans/scipy,jjhelmus/scipy,grlee77/scipy,endolith/scipy,tylerjereddy/scipy,endolith/scipy,apbard/scipy,jonycgn/scipy,aman-iitj/scipy,maniteja123/scipy,matthewalbani/scipy,larsmans/scipy,scipy/scipy,tylerjereddy/scipy,jseabold/scipy,ales-erjavec/scipy,njwilson23/scipy,kleskjr/scipy,gertingold/scipy,pbrod/scipy,nvoron23/scipy,mhogg/scipy,njwilson23/scipy,nonhermitian/scipy,rmcgibbo/scipy,mdhaber/scipy,hainm/scipy,josephcslater/scipy,maciejkula/scipy,pnedunuri/scipy,jseabold/scipy,Newman101/scipy,kleskjr/scipy,gdooper/scipy,rgommers/scipy,arokem/scipy,jor-/scipy,sauliusl/scipy,Gillu13/scipy,vanpact/scipy,Stefan-Endres/scipy,mdhaber/scipy,pbrod/scipy,haudren/scipy,trankmichael/scipy,andim/scipy,Gillu13/scipy,Dapid/scipy,njwilson23/scipy,WarrenWeckesser/scipy,bkendzior/scipy,WillieMaddox/scipy,mgaitan/scipy,fredrikw/scipy,lhilt/scipy,anntzer/scipy,piyush0609/scipy,aman-iitj/scipy,mikebenfield/scipy,gdooper/scipy,Kamp9/scipy,vanpact/scipy,Srisai85/scipy,Srisai85/scipy,behzadnouri/scipy,efiring/scipy,Stefan-Endres/scipy,petebachant/scipy,fernand/scipy,maniteja123/scipy,anielsen001/scipy,gdooper/scipy,minhlongdo/scipy,mtrbean/scipy,bkendzior/scipy,felipebetancur/scipy,behzadnouri/scipy,haudren/scipy,matthewalbani/scipy,perimosocordiae/scipy,newemailjdm/scipy,f
redrikw/scipy,futurulus/scipy,matthew-brett/scipy,sauliusl/scipy,mgaitan/scipy,felipebetancur/scipy,anntzer/scipy,matthew-brett/scipy,gfyoung/scipy,anntzer/scipy,ChanderG/scipy,rmcgibbo/scipy,Eric89GXL/scipy,efiring/scipy,pyramania/scipy,jamestwebber/scipy,hainm/scipy,vanpact/scipy,ndchorley/scipy,tylerjereddy/scipy,chatcannon/scipy,efiring/scipy,njwilson23/scipy,rmcgibbo/scipy,matthewalbani/scipy,surhudm/scipy,person142/scipy,jamestwebber/scipy,Eric89GXL/scipy,lukauskas/scipy,vberaudi/scipy,giorgiop/scipy,mhogg/scipy,mhogg/scipy,vhaasteren/scipy,richardotis/scipy,hainm/scipy,matthew-brett/scipy,zxsted/scipy,behzadnouri/scipy,Eric89GXL/scipy,argriffing/scipy,pschella/scipy,argriffing/scipy,woodscn/scipy,jsilter/scipy,fernand/scipy,chatcannon/scipy,zaxliu/scipy,ortylp/scipy,woodscn/scipy,mingwpy/scipy,mhogg/scipy,richardotis/scipy,vhaasteren/scipy,gef756/scipy,mortonjt/scipy,apbard/scipy,vberaudi/scipy,tylerjereddy/scipy,ales-erjavec/scipy,rgommers/scipy,Stefan-Endres/scipy,hainm/scipy,aeklant/scipy,minhlongdo/scipy,mtrbean/scipy,aarchiba/scipy,sriki18/scipy,WarrenWeckesser/scipy,niknow/scipy,mikebenfield/scipy,surhudm/scipy,raoulbq/scipy,e-q/scipy,fredrikw/scipy,maniteja123/scipy,vigna/scipy,futurulus/scipy,fernand/scipy,vberaudi/scipy,pschella/scipy,jonycgn/scipy,rgommers/scipy,e-q/scipy,jseabold/scipy,e-q/scipy,trankmichael/scipy,richardotis/scipy,larsmans/scipy,Shaswat27/scipy,Kamp9/scipy,raoulbq/scipy,person142/scipy,arokem/scipy
|
---
+++
@@ -45,3 +45,10 @@
raise AssertionError("First warning for %s is not a "
"%s( is %s)" % (func.__name__, warning_class, l[0]))
return result
+
+
+if NumpyVersion(np.__version__) >= '1.6.0':
+ count_nonzero = np.count_nonzero
+else:
+ def count_nonzero(a):
+ return (a != 0).sum()
|
be922ce28931c101a245aa4b5b0f74c31c23cc60
|
tests/test_group.py
|
tests/test_group.py
|
from unittest import TestCase
class GroupTestCase(TestCase):
def get_persons(self):
pass
|
from unittest import TestCase
from address_book import Person, Group
class GroupTestCase(TestCase):
def get_persons(self):
john_person = Person(
'John',
'Doe',
['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
['+79834772053'],
['john@gmail.com']
)
ivan_person = Person(
'Ivan',
'Sidorov',
['Russian Federation, Kemerovo region, Belovo, Kirova street 42, apt. 13'],
['+79834771122'],
['john@gmail.com']
)
group = Group('friends')
self.assertFalse(group.persons)
group.add_person(ivan_person)
group.add_person(john_person)
self.assertEqual(
group.persons,
[ivan_person, john_person]
)
|
Test the ability to add and get all persons in the group
|
Test the ability to add and get all persons in the group
|
Python
|
mit
|
dizpers/python-address-book-assignment
|
---
+++
@@ -1,7 +1,30 @@
from unittest import TestCase
+
+from address_book import Person, Group
class GroupTestCase(TestCase):
def get_persons(self):
- pass
+ john_person = Person(
+ 'John',
+ 'Doe',
+ ['Russian Federation, Kemerovo region, Kemerovo, Kirova street 23, apt. 42'],
+ ['+79834772053'],
+ ['john@gmail.com']
+ )
+ ivan_person = Person(
+ 'Ivan',
+ 'Sidorov',
+ ['Russian Federation, Kemerovo region, Belovo, Kirova street 42, apt. 13'],
+ ['+79834771122'],
+ ['john@gmail.com']
+ )
+ group = Group('friends')
+ self.assertFalse(group.persons)
+ group.add_person(ivan_person)
+ group.add_person(john_person)
+ self.assertEqual(
+ group.persons,
+ [ivan_person, john_person]
+ )
|
be4d21b5486f3bba5a4d844015d3d35630ac7d03
|
udata/auth/forms.py
|
udata/auth/forms.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
'First Name', [validators.Required('First name is required')])
last_name = fields.StringField(
'Last Name', [validators.Required('Last name is required')])
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
from udata.i18n import lazy_gettext as _
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
_('First name'), [validators.Required(_('First name is required'))])
last_name = fields.StringField(
_('Last name'), [validators.Required(_('Last name is required'))])
|
Apply i18n to First and Last name in registration form
|
Apply i18n to First and Last name in registration form
|
Python
|
agpl-3.0
|
etalab/udata,etalab/udata,etalab/udata,opendatateam/udata,opendatateam/udata,opendatateam/udata
|
---
+++
@@ -4,10 +4,10 @@
from flask_security.forms import RegisterForm
from udata.forms import fields
from udata.forms import validators
-
+from udata.i18n import lazy_gettext as _
class ExtendedRegisterForm(RegisterForm):
first_name = fields.StringField(
- 'First Name', [validators.Required('First name is required')])
+ _('First name'), [validators.Required(_('First name is required'))])
last_name = fields.StringField(
- 'Last Name', [validators.Required('Last name is required')])
+ _('Last name'), [validators.Required(_('Last name is required'))])
|
12728f6b924a3d45f78b3955cb9fcb563db6a81f
|
pida_abc_type.py
|
pida_abc_type.py
|
from abc import ABCMeta, abstractmethod
class IdaTypes:
__metaclass__ = ABCMeta
@abstractmethod
def decode(self, data):
raise NotImplementedError()
@abstractmethod
def get_name(self):
raise NotImplementedError()
@abstractmethod
def get_type(self):
raise NotImplementedError()
|
from abc import ABCMeta, abstractmethod
class IdaTypes:
__metaclass__ = ABCMeta
@abstractmethod
def decode(self, data):
raise NotImplementedError()
@abstractmethod
def get_type(self):
raise NotImplementedError()
|
Delete abstract method get name
|
Delete abstract method get name
|
Python
|
mit
|
goodwinxp/ATFGenerator,goodwinxp/ATFGenerator,goodwinxp/ATFGenerator
|
---
+++
@@ -9,9 +9,5 @@
raise NotImplementedError()
@abstractmethod
- def get_name(self):
- raise NotImplementedError()
-
- @abstractmethod
def get_type(self):
raise NotImplementedError()
|
e421a3cfd9ecfe05aa21b2b3da792f7ab824727d
|
experimental/db/remove_property.py
|
experimental/db/remove_property.py
|
""" Remove a property from the datastore.
How to use:
$ cd experimental/db/
$ PYTHONPATH=. remote_api_shell.py -s homeawesomation.appspot.com
> import remove_property
"""
from google.appengine.api import namespace_manager
from google.appengine.ext import db
class Base(db.Expando): pass
def remove(namespace, field):
namespace_manager.set_namespace(namespace)
for base in Base.all().run():
if hasattr(base, field):
print "%s %s" %(base.id, base.name)
del base.category
base.put()
|
""" Remove a property from the datastore.
How to use:
$ cd experimental/db/
$ PYTHONPATH=. remote_api_shell.py -s homeawesomation.appspot.com
> import remove_property
"""
from google.appengine.api import namespace_manager
from google.appengine.ext import db
class Base(db.Expando): pass
def remove(namespace, field):
namespace_manager.set_namespace(namespace)
for base in Base.all().run():
if hasattr(base, field):
print "%s %s" % (base.key().id_or_name(), getattr(base, 'name', None))
delattr(base, field)
base.put()
|
Fix datastore delete field script.
|
Fix datastore delete field script.
|
Python
|
mit
|
tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation,tomwilkie/awesomation
|
---
+++
@@ -15,6 +15,6 @@
namespace_manager.set_namespace(namespace)
for base in Base.all().run():
if hasattr(base, field):
- print "%s %s" %(base.id, base.name)
- del base.category
+ print "%s %s" % (base.key().id_or_name(), getattr(base, 'name', None))
+ delattr(base, field)
base.put()
|
5c95d23ff85a5db3c533679befd8aef4a85baf9d
|
speeches/search_indexes.py
|
speeches/search_indexes.py
|
import datetime
from haystack import indexes
from speeches.models import Speech
class SpeechIndex(indexes.SearchIndex, indexes.Indexable):
# Use a template here to include speaker name as well... TODO
text = indexes.CharField(document=True, model_attr='text') # , use_template=True)
title = indexes.CharField() # use_template=True)
start_date = indexes.DateTimeField(model_attr='start_date')
instance = indexes.CharField(model_attr='instance__label')
speaker = indexes.IntegerField(model_attr='speaker__id', null=True)
def get_model(self):
return Speech
def index_queryset(self, using=None):
"""Used when the entire index for model is updated."""
return self.get_model().objects # .filter(pub_date__lte=datetime.datetime.now())
|
import datetime
from haystack import indexes
from speeches.models import Speech
class SpeechIndex(indexes.SearchIndex, indexes.Indexable):
# Use a template here to include speaker name as well... TODO
text = indexes.CharField(document=True, model_attr='text') # , use_template=True)
title = indexes.CharField() # use_template=True)
start_date = indexes.DateTimeField(model_attr='start_date', null=True)
instance = indexes.CharField(model_attr='instance__label')
speaker = indexes.IntegerField(model_attr='speaker__id', null=True)
def get_model(self):
return Speech
def index_queryset(self, using=None):
"""Used when the entire index for model is updated."""
return self.get_model().objects # .filter(pub_date__lte=datetime.datetime.now())
|
Allow a null start date in search index.
|
Allow a null start date in search index.
|
Python
|
agpl-3.0
|
opencorato/sayit,opencorato/sayit,opencorato/sayit,opencorato/sayit
|
---
+++
@@ -6,7 +6,7 @@
# Use a template here to include speaker name as well... TODO
text = indexes.CharField(document=True, model_attr='text') # , use_template=True)
title = indexes.CharField() # use_template=True)
- start_date = indexes.DateTimeField(model_attr='start_date')
+ start_date = indexes.DateTimeField(model_attr='start_date', null=True)
instance = indexes.CharField(model_attr='instance__label')
speaker = indexes.IntegerField(model_attr='speaker__id', null=True)
|
1fac10d27f00322e34c3b89527c32b1dcb02decd
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
Support Stylus blocks in Vue single-file components
|
Support Stylus blocks in Vue single-file components
|
Python
|
mit
|
jackbrewer/SublimeLinter-contrib-stylint
|
---
+++
@@ -17,7 +17,8 @@
"""Provides an interface to stylint."""
npm_name = 'stylint'
- syntax = 'stylus'
+ syntax = ('stylus', 'vue')
+ selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
|
33b5e210ffc32f9f7b3764e1f6f3d54e1f040783
|
changes/flow.py
|
changes/flow.py
|
import logging
from plumbum import local, CommandNotFound
from changes.changelog import generate_changelog
from changes.packaging import build_package, install_package, upload_package, install_from_pypi
from changes.vcs import tag_and_push, commit_version_change
from changes.verification import run_tests
from changes.version import increment_version
log = logging.getLogger(__name__)
def perform_release(context):
"""Executes the release process."""
try:
run_tests()
if not context.skip_changelog:
generate_changelog(context)
increment_version(context)
build_package(context)
install_package(context)
upload_package(context)
install_from_pypi(context)
commit_version_change(context)
tag_and_push(context)
except:
log.exception('Error releasing')
|
import logging
import click
from changes.changelog import generate_changelog
from changes.config import project_config, store_settings
from changes.packaging import build_distributions, install_package, upload_package, install_from_pypi
from changes.vcs import tag_and_push, commit_version_change, create_github_release, upload_release_distributions
from changes.verification import run_tests
from changes.version import increment_version
log = logging.getLogger(__name__)
def publish(context):
"""Publishes the project"""
commit_version_change(context)
if context.github:
# github token
project_settings = project_config(context.module_name)
if not project_settings['gh_token']:
click.echo('You need a GitHub token for changes to create a release.')
click.pause('Press [enter] to launch the GitHub "New personal access '
'token" page, to create a token for changes.')
click.launch('https://github.com/settings/tokens/new')
project_settings['gh_token'] = click.prompt('Enter your changes token')
store_settings(context.module_name, project_settings)
description = click.prompt('Describe this release')
upload_url = create_github_release(context, project_settings['gh_token'], description)
upload_release_distributions(
context,
project_settings['gh_token'],
build_distributions(context),
upload_url,
)
click.pause('Press [enter] to review and update your new release')
click.launch('{0}/releases/tag/{1}'.format(context.repo_url, context.new_version))
else:
tag_and_push(context)
def perform_release(context):
"""Executes the release process."""
try:
run_tests()
if not context.skip_changelog:
generate_changelog(context)
increment_version(context)
build_distributions(context)
install_package(context)
upload_package(context)
install_from_pypi(context)
publish(context)
except:
log.exception('Error releasing')
|
Add github releases to publishing
|
Add github releases to publishing
|
Python
|
mit
|
goldsborough/changes
|
---
+++
@@ -1,14 +1,47 @@
import logging
-from plumbum import local, CommandNotFound
+import click
from changes.changelog import generate_changelog
-from changes.packaging import build_package, install_package, upload_package, install_from_pypi
-from changes.vcs import tag_and_push, commit_version_change
+from changes.config import project_config, store_settings
+from changes.packaging import build_distributions, install_package, upload_package, install_from_pypi
+from changes.vcs import tag_and_push, commit_version_change, create_github_release, upload_release_distributions
from changes.verification import run_tests
from changes.version import increment_version
log = logging.getLogger(__name__)
+
+
+def publish(context):
+ """Publishes the project"""
+ commit_version_change(context)
+
+ if context.github:
+ # github token
+ project_settings = project_config(context.module_name)
+ if not project_settings['gh_token']:
+ click.echo('You need a GitHub token for changes to create a release.')
+ click.pause('Press [enter] to launch the GitHub "New personal access '
+ 'token" page, to create a token for changes.')
+ click.launch('https://github.com/settings/tokens/new')
+ project_settings['gh_token'] = click.prompt('Enter your changes token')
+
+ store_settings(context.module_name, project_settings)
+ description = click.prompt('Describe this release')
+
+ upload_url = create_github_release(context, project_settings['gh_token'], description)
+
+ upload_release_distributions(
+ context,
+ project_settings['gh_token'],
+ build_distributions(context),
+ upload_url,
+ )
+
+ click.pause('Press [enter] to review and update your new release')
+ click.launch('{0}/releases/tag/{1}'.format(context.repo_url, context.new_version))
+ else:
+ tag_and_push(context)
def perform_release(context):
@@ -18,14 +51,17 @@
if not context.skip_changelog:
generate_changelog(context)
+
increment_version(context)
- build_package(context)
+ build_distributions(context)
+
install_package(context)
+
upload_package(context)
+
install_from_pypi(context)
- commit_version_change(context)
- tag_and_push(context)
+ publish(context)
except:
log.exception('Error releasing')
|
6418eb4cb76a00372552b3e06b71f42c520ada7b
|
ci/TimeUtils.py
|
ci/TimeUtils.py
|
# Copyright 2016 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.utils import timezone
import datetime, math
def sortable_time_str(d):
return d.strftime('%Y%m%d%H%M%S')
def display_time_str(d):
#return d.strftime('%H:%M:%S %m/%d/%y')
return naturaltime(d)
def human_time_str(d):
#return d.strftime('%H:%M:%S %m/%d/%y')
return naturaltime(d)
def get_local_timestamp():
return math.floor((timezone.localtime(timezone.now()) - timezone.make_aware(datetime.datetime.fromtimestamp(0))).total_seconds())
def get_local_time():
return timezone.localtime(timezone.now())
def std_time_str(d):
return d.strftime('%H:%M:%S %m/%d/%y')
|
# Copyright 2016 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.utils import timezone
import datetime, math
def sortable_time_str(d):
return d.strftime('%Y%m%d%H%M%S%f')
def display_time_str(d):
#return d.strftime('%H:%M:%S %m/%d/%y')
return naturaltime(d)
def human_time_str(d):
#return d.strftime('%H:%M:%S %m/%d/%y')
return naturaltime(d)
def get_local_timestamp():
return math.floor((timezone.localtime(timezone.now()) - timezone.make_aware(datetime.datetime.fromtimestamp(0))).total_seconds())
def get_local_time():
return timezone.localtime(timezone.now())
def std_time_str(d):
return d.strftime('%H:%M:%S %m/%d/%y')
|
Use microseconds in event sort string.
|
Use microseconds in event sort string.
This helps javascript sorting even when two events happen
in the same second.
|
Python
|
apache-2.0
|
brianmoose/civet,idaholab/civet,idaholab/civet,idaholab/civet,idaholab/civet,brianmoose/civet,brianmoose/civet,brianmoose/civet
|
---
+++
@@ -18,7 +18,7 @@
import datetime, math
def sortable_time_str(d):
- return d.strftime('%Y%m%d%H%M%S')
+ return d.strftime('%Y%m%d%H%M%S%f')
def display_time_str(d):
#return d.strftime('%H:%M:%S %m/%d/%y')
|
bca298cc9942005f3ab6c74359a52a4c410a5231
|
manage.py
|
manage.py
|
from flask.ext.script import Manager
from flask.ext.alembic import ManageMigrations
import os
from starter import app, db
from starter.users.models import user_datastore
manager = Manager(app)
manager.add_command("migrate", ManageMigrations())
@manager.command
def add_admin(email, password):
user = user_datastore.create_user(email=email, password=password)
admin_role = user_datastore.find_or_create_role("admin")
user_datastore.add_role_to_user(user, admin_role)
db.session.commit()
print "Created admin user: %s" % (user, )
@manager.command
def init(name):
print "Initializing flask-starter project with name '%s'" % (name, )
module_name = "_".join(name.split()).lower()
print "Python main module will be:", module_name
module_files = ["manage.py", "dev.py", "shell.py"]
for filename in module_files:
print "Updating module name in '%s'" % (filename, )
with open(filename) as f:
lines = [l.replace("starter", module_name) for l in f.readlines()]
with open(filename, 'w') as f:
f.writelines(lines)
print "Renaming 'starter' module to '%s'" % (module_name, )
os.rename("starter", module_name)
if __name__ == "__main__":
manager.run()
|
from flask.ext.script import Manager
from flask.ext.alembic import ManageMigrations
import os
from starter import app, db
from starter.users.models import user_datastore
manager = Manager(app)
manager.add_command("migrate", ManageMigrations())
@manager.command
def add_admin(email, password):
user = user_datastore.create_user(email=email, password=password)
admin_role = user_datastore.find_or_create_role("admin")
user_datastore.add_role_to_user(user, admin_role)
db.session.commit()
print "Created admin user: %s" % (user, )
@manager.command
def init(name):
print "Initializing flask-starter project with name '%s'" % (name, )
module_name = "_".join(name.split()).lower()
print "Python main module will be:", module_name
module_files = ["manage.py", "dev.py", "shell.py", "starter/config.py"]
for filename in module_files:
print "Updating module name in '%s'" % (filename, )
with open(filename) as f:
lines = [l.replace("starter", module_name) for l in f.readlines()]
with open(filename, 'w') as f:
f.writelines(lines)
print "Renaming 'starter' module to '%s'" % (module_name, )
os.rename("starter", module_name)
if __name__ == "__main__":
manager.run()
|
Update config file with updated project name
|
Update config file with updated project name
|
Python
|
mit
|
litnimax/flask-starter,andrewsnowden/flask-starter,wenxer/flask-starter,andrewsnowden/flask-starter,litnimax/flask-starter,litnimax/flask-starter,wenxer/flask-starter,andrewsnowden/flask-starter,wenxer/flask-starter
|
---
+++
@@ -28,7 +28,7 @@
module_name = "_".join(name.split()).lower()
print "Python main module will be:", module_name
- module_files = ["manage.py", "dev.py", "shell.py"]
+ module_files = ["manage.py", "dev.py", "shell.py", "starter/config.py"]
for filename in module_files:
print "Updating module name in '%s'" % (filename, )
|
6aec2246389934bca253a2fcd18f3ac24525c670
|
molvs/utils.py
|
molvs/utils.py
|
# -*- coding: utf-8 -*-
"""
molvs.utils
~~~~~~~~~~~
This module contains miscellaneous utility functions.
:copyright: Copyright 2014 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import functools
from itertools import izip, tee
def memoized_property(fget):
"""Decorator to create memoized properties."""
attr_name = '_{}'.format(fget.__name__)
@functools.wraps(fget)
def fget_memoized(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fget(self))
return getattr(self, attr_name)
return property(fget_memoized)
def pairwise(iterable):
"""Utility function to iterate in a pairwise fashion."""
a, b = tee(iterable)
next(b, None)
return izip(a, b)
|
# -*- coding: utf-8 -*-
"""
molvs.utils
~~~~~~~~~~~
This module contains miscellaneous utility functions.
:copyright: Copyright 2014 by Matt Swain.
:license: MIT, see LICENSE file for more details.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import functools
from itertools import tee
try:
from itertools import izip
except ImportError:
izip = zip
def memoized_property(fget):
"""Decorator to create memoized properties."""
attr_name = '_{}'.format(fget.__name__)
@functools.wraps(fget)
def fget_memoized(self):
if not hasattr(self, attr_name):
setattr(self, attr_name, fget(self))
return getattr(self, attr_name)
return property(fget_memoized)
def pairwise(iterable):
"""Utility function to iterate in a pairwise fashion."""
a, b = tee(iterable)
next(b, None)
return izip(a, b)
|
Fix izip import for python3
|
Fix izip import for python3
|
Python
|
mit
|
mcs07/MolVS
|
---
+++
@@ -13,7 +13,12 @@
from __future__ import unicode_literals
from __future__ import division
import functools
-from itertools import izip, tee
+from itertools import tee
+
+try:
+ from itertools import izip
+except ImportError:
+ izip = zip
def memoized_property(fget):
|
d99ad3de00ec8bb9b3a36de5f50bd4f48a08cbb1
|
test/acceptance/test_cli_vital.py
|
test/acceptance/test_cli_vital.py
|
import unittest
from pathlib import Path
import subprocess
class TestVintDoNotDiedWhenLintingVital(unittest.TestCase):
def assertVintStillAlive(self, cmd):
try:
got_output = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
universal_newlines=True)
except subprocess.CalledProcessError as err:
got_output = err.output
unexpected_keyword = r'^Traceback'
self.assertNotRegex(got_output, unexpected_keyword)
def assertNotRegex(self, string, pattern):
assertNotRegexpMatches = getattr(self, 'assertNotRegexpMatches', None)
if assertNotRegexpMatches:
assertNotRegexpMatches(string, pattern)
return
super(TestVintDoNotDiedWhenLintingVital, self).assertNotRegex(string, pattern)
def test_not_died_when_linting_vital(self):
vital_dir = str(Path('test', 'fixture', 'cli', 'vital.vim'))
cmd = ['vint', vital_dir]
self.assertVintStillAlive(cmd)
if __name__ == '__main__':
unittest.main()
|
import unittest
from pathlib import Path
import subprocess
class TestVintDoNotDiedWhenLintingVital(unittest.TestCase):
def assertVintStillAlive(self, cmd):
try:
got_output = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
universal_newlines=True)
except subprocess.CalledProcessError as err:
got_output = err.output
unexpected_keyword = 'Traceback'
self.assertFalse(unexpected_keyword in got_output,
'vint was died when linting vital.vim: ' + got_output)
def test_survive_after_linting(self):
vital_dir = str(Path('test', 'fixture', 'cli', 'vital.vim'))
cmd = ['vint', vital_dir]
self.assertVintStillAlive(cmd)
if __name__ == '__main__':
unittest.main()
|
Fix false-negative test caused by using fallbacked assertNotRegex
|
Fix false-negative test caused by using fallbacked assertNotRegex
|
Python
|
mit
|
Kuniwak/vint,RianFuro/vint,Kuniwak/vint,RianFuro/vint
|
---
+++
@@ -12,21 +12,12 @@
except subprocess.CalledProcessError as err:
got_output = err.output
- unexpected_keyword = r'^Traceback'
- self.assertNotRegex(got_output, unexpected_keyword)
+ unexpected_keyword = 'Traceback'
+ self.assertFalse(unexpected_keyword in got_output,
+ 'vint was died when linting vital.vim: ' + got_output)
- def assertNotRegex(self, string, pattern):
- assertNotRegexpMatches = getattr(self, 'assertNotRegexpMatches', None)
- if assertNotRegexpMatches:
- assertNotRegexpMatches(string, pattern)
- return
-
- super(TestVintDoNotDiedWhenLintingVital, self).assertNotRegex(string, pattern)
-
-
-
- def test_not_died_when_linting_vital(self):
+ def test_survive_after_linting(self):
vital_dir = str(Path('test', 'fixture', 'cli', 'vital.vim'))
cmd = ['vint', vital_dir]
|
57fbdb0f3856b91f55ef8e230044a1f60b46f14d
|
bioshareX/serializers.py
|
bioshareX/serializers.py
|
from bioshareX.models import ShareLog, Share, Tag, ShareStats
from rest_framework import serializers
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class UserSerializer(serializers.ModelSerializer):
class Meta:
fields=('first_name','last_name','email','username','id')
model = User
class ShareLogSerializer(serializers.ModelSerializer):
user = UserSerializer(read_only=True)
paths = serializers.JSONField()
class Meta:
model = ShareLog
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
class ShareStatsSerializer(serializers.ModelSerializer):
class Meta:
model = ShareStats
fields = ('num_files','bytes','updated')
class ShareSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
stats = ShareStatsSerializer(many=False,read_only=True)
tags = TagSerializer(many=True,read_only=True)
owner = UserSerializer(read_only=True)
def get_url(self,obj):
reverse('list_directory',kwargs={'share':obj.id})
class Meta:
model = Share
|
from bioshareX.models import ShareLog, Share, Tag, ShareStats
from rest_framework import serializers
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
class UserSerializer(serializers.ModelSerializer):
class Meta:
fields=('first_name','last_name','email','username','id')
model = User
class ShareLogSerializer(serializers.ModelSerializer):
user = UserSerializer(read_only=True)
paths = serializers.JSONField()
class Meta:
model = ShareLog
class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
class ShareStatsSerializer(serializers.ModelSerializer):
class Meta:
model = ShareStats
fields = ('num_files','bytes','updated')
class ShareSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
stats = ShareStatsSerializer(many=False,read_only=True)
tags = TagSerializer(many=True,read_only=True)
owner = UserSerializer(read_only=True)
def get_url(self,obj):
return reverse('list_directory',kwargs={'share':obj.id})
class Meta:
model = Share
|
Fix url attribute on share serializer :)
|
Fix url attribute on share serializer :)
|
Python
|
mit
|
amschaal/bioshare,amschaal/bioshare,amschaal/bioshare,amschaal/bioshare,amschaal/bioshare
|
---
+++
@@ -27,7 +27,7 @@
tags = TagSerializer(many=True,read_only=True)
owner = UserSerializer(read_only=True)
def get_url(self,obj):
- reverse('list_directory',kwargs={'share':obj.id})
+ return reverse('list_directory',kwargs={'share':obj.id})
class Meta:
model = Share
|
ba722635f13350c4b1e04aeab0838c923deb1985
|
feeds/middlewares.py
|
feeds/middlewares.py
|
import logging
from scrapy.spidermiddlewares.httperror import HttpError
logger = logging.getLogger(__name__)
class FeedsHttpErrorMiddleware:
@classmethod
def from_crawler(cls, crawler):
return cls()
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
logger.warning(
"Ignoring response %(response)r: HTTP status code is not "
"handled or not allowed",
{'response': response}, extra={'spider': spider},
)
return []
|
import logging
from scrapy.spidermiddlewares.httperror import HttpError
logger = logging.getLogger(__name__)
class FeedsHttpErrorMiddleware:
@classmethod
def from_crawler(cls, crawler):
return cls()
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
if response.status in [500, 502, 503, 504]:
# These status codes are usually induced by overloaded sites,
# updates, short downtimes, etc. and are not that relevant.
lgr = logger.info
else:
lgr = logger.warning
lgr(
"Ignoring response %(response)r: HTTP status code is not "
"handled or not allowed",
{'response': response}, extra={'spider': spider},
)
return []
|
Use log level info for HTTP statuses 500, 502, 503, 504.
|
Use log level info for HTTP statuses 500, 502, 503, 504.
These status codes are usually induced by overloaded sites, updates, short
downtimes, etc. and are not that relevant.
|
Python
|
agpl-3.0
|
Lukas0907/feeds,Lukas0907/feeds,nblock/feeds,nblock/feeds
|
---
+++
@@ -13,7 +13,13 @@
def process_spider_exception(self, response, exception, spider):
if isinstance(exception, HttpError):
- logger.warning(
+ if response.status in [500, 502, 503, 504]:
+ # These status codes are usually induced by overloaded sites,
+ # updates, short downtimes, etc. and are not that relevant.
+ lgr = logger.info
+ else:
+ lgr = logger.warning
+ lgr(
"Ignoring response %(response)r: HTTP status code is not "
"handled or not allowed",
{'response': response}, extra={'spider': spider},
|
150dad224dd985762714b73e9a91d084efb11e06
|
ob_pipelines/sample.py
|
ob_pipelines/sample.py
|
import os
from luigi import Parameter
from ob_airtable import get_record_by_name, get_record
AIRTABLE_EXPT_TABLE = 'Genomics%20Expt'
AIRTABLE_SAMPLE_TABLE = 'Genomics%20Sample'
S3_BUCKET = os.environ.get('S3_BUCKET')
def get_samples(expt_id):
expt = get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
sample_keys = expt['fields']['Genomics samples']
for sample_key in sample_keys:
sample = get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
yield sample['fields']['Name']
class Sample(object):
sample_id = Parameter()
@property
def sample(self):
if not hasattr(self, '_sample'):
self._sample = get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
return self._sample
@property
def sample_folder(self):
return '{expt}/{sample}'.format(
bucket=S3_BUCKET,
expt = self.experiment['Name'],
sample=self.sample_id)
@property
def experiment(self):
if not hasattr(self, '_experiment'):
expt_key = self.sample['Experiment'][0]
self._experiment = get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
return self._experiment
|
import os
from luigi import Parameter
from ob_airtable import AirtableClient
AIRTABLE_EXPT_TABLE = 'Genomics%20Expt'
AIRTABLE_SAMPLE_TABLE = 'Genomics%20Sample'
S3_BUCKET = os.environ.get('S3_BUCKET')
client = AirtableClient()
def get_samples(expt_id):
expt = client.get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
sample_keys = expt['fields']['Genomics samples']
for sample_key in sample_keys:
sample = client.get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
yield sample['fields']['Name']
class Sample(object):
sample_id = Parameter()
@property
def sample(self):
if not hasattr(self, '_sample'):
self._sample = client.get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
return self._sample
@property
def sample_folder(self):
return '{expt}/{sample}'.format(
bucket=S3_BUCKET,
expt = self.experiment['Name'],
sample=self.sample_id)
@property
def experiment(self):
if not hasattr(self, '_experiment'):
expt_key = self.sample['Experiment'][0]
self._experiment = client.get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
return self._experiment
|
Update to match changes in ob-airtable
|
Update to match changes in ob-airtable
|
Python
|
apache-2.0
|
outlierbio/ob-pipelines,outlierbio/ob-pipelines,outlierbio/ob-pipelines
|
---
+++
@@ -1,20 +1,21 @@
import os
from luigi import Parameter
-from ob_airtable import get_record_by_name, get_record
+from ob_airtable import AirtableClient
AIRTABLE_EXPT_TABLE = 'Genomics%20Expt'
AIRTABLE_SAMPLE_TABLE = 'Genomics%20Sample'
S3_BUCKET = os.environ.get('S3_BUCKET')
+client = AirtableClient()
def get_samples(expt_id):
- expt = get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
+ expt = client.get_record_by_name(expt_id, AIRTABLE_EXPT_TABLE)
sample_keys = expt['fields']['Genomics samples']
for sample_key in sample_keys:
- sample = get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
+ sample = client.get_record(sample_key, AIRTABLE_SAMPLE_TABLE)
yield sample['fields']['Name']
@@ -25,7 +26,7 @@
@property
def sample(self):
if not hasattr(self, '_sample'):
- self._sample = get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
+ self._sample = client.get_record_by_name(self.sample_id, AIRTABLE_SAMPLE_TABLE)['fields']
return self._sample
@property
@@ -39,5 +40,5 @@
def experiment(self):
if not hasattr(self, '_experiment'):
expt_key = self.sample['Experiment'][0]
- self._experiment = get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
+ self._experiment = client.get_record(expt_key, AIRTABLE_EXPT_TABLE)['fields']
return self._experiment
|
af10bb6acd0a7f4f68d71662eca648ef51eba1c4
|
src/lib/db/redismanager.py
|
src/lib/db/redismanager.py
|
import redis
import pickle
from .idatabasemanager import IDatabaseManager
class RedisManager(IDatabaseManager):
KEY_MESSAGE_QUEUE = 'message_queue'
def __init__(self, redis_url):
self.connection = redis.Redis.from_url(redis_url)
def queue_message(self, message):
serialized_message = pickle.dumps(message)
self.connection.rpush(self.KEY_MESSAGE_QUEUE, message)
# self.connection.save()
def get_queued_message(self):
serialized_message = self.connection.lpop(self.KEY_MESSAGE_QUEUE)
print(serialized_message)
# self.connection.save()
if serialized_message != None:
return pickle.loads(serialized_message)
return None
|
import redis
import pickle
from .idatabasemanager import IDatabaseManager
class RedisManager(IDatabaseManager):
KEY_MESSAGE_QUEUE = 'message_queue'
def __init__(self, redis_url):
self.connection = redis.Redis.from_url(redis_url)
def queue_message(self, message):
serialized_message = pickle.dumps(message)
self.connection.rpush(self.KEY_MESSAGE_QUEUE, serialized_message)
def get_queued_message(self):
serialized_message = self.connection.lpop(self.KEY_MESSAGE_QUEUE)
print(serialized_message)
if serialized_message != None:
return pickle.loads(serialized_message)
return None
|
Fix object serialization and deserialization in Redis Manager module.
|
Fix object serialization and deserialization in Redis Manager module.
|
Python
|
mit
|
edonosotti/wechat-bot-skeleton-python
|
---
+++
@@ -11,13 +11,11 @@
def queue_message(self, message):
serialized_message = pickle.dumps(message)
- self.connection.rpush(self.KEY_MESSAGE_QUEUE, message)
- # self.connection.save()
+ self.connection.rpush(self.KEY_MESSAGE_QUEUE, serialized_message)
def get_queued_message(self):
serialized_message = self.connection.lpop(self.KEY_MESSAGE_QUEUE)
print(serialized_message)
- # self.connection.save()
if serialized_message != None:
return pickle.loads(serialized_message)
return None
|
fbbef3b57f115fa7adc291c80459cd04e7c4e877
|
config.py
|
config.py
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = 'secret'
class DevelopmentConfig(Config):
DEBUG = True
class ProductionConfig(Config):
DEBUG = False
class TestingConfig(Config):
DEBUG = True
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
}
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = 'secret'
class DevelopmentConfig(Config):
WTF_CSRF_ENABLED = False
DEBUG = True
class ProductionConfig(Config):
DEBUG = False
class TestingConfig(Config):
WTF_CSRF_ENABLED = False
DEBUG = True
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
}
|
Disable CSRF in testing and development mode
|
Disable CSRF in testing and development mode
|
Python
|
mit
|
JoshuaOndieki/buckylist,JoshuaOndieki/buckylist
|
---
+++
@@ -7,6 +7,7 @@
class DevelopmentConfig(Config):
+ WTF_CSRF_ENABLED = False
DEBUG = True
class ProductionConfig(Config):
@@ -14,6 +15,7 @@
class TestingConfig(Config):
+ WTF_CSRF_ENABLED = False
DEBUG = True
|
78a157d25018a9ed858d4b3b89ee565e6ae9a44c
|
backend/integrations/tasks.py
|
backend/integrations/tasks.py
|
from functools import wraps
from celery import shared_task
from django.conf import settings
from integrations import slack
def switchable_task(func):
@wraps(func)
def wrapper(*args, **kwargs):
if settings.USE_SCHEDULER:
return func.delay(*args, **kwargs)
return func(*args, **kwargs)
return wrapper
@switchable_task
@shared_task
def notify_new_submission(
title: str,
elevator_pitch: str,
submission_type: str,
admin_url,
topic: str,
duration: int,
):
slack.send_message(
[
{
"type": "section",
"text": {
"text": f"New _{submission_type}_ Submission",
"type": "mrkdwn",
},
}
],
[
{
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"*<{admin_url}|{title.capitalize()}>*\n"
f"*Elevator Pitch*\n{elevator_pitch}",
},
"fields": [
{"type": "mrkdwn", "text": "*Topic*"},
{"type": "mrkdwn", "text": "*Duration*"},
{"type": "plain_text", "text": str(duration)},
{"type": "mrkdwn", "text": topic},
],
}
]
}
],
)
|
from functools import wraps
from celery import shared_task
from django.conf import settings
from integrations import slack
def switchable_task(func):
@wraps(func)
def wrapper(*args, **kwargs):
if settings.USE_SCHEDULER:
return func.delay(*args, **kwargs)
return func(*args, **kwargs)
return wrapper
@switchable_task
@shared_task
def notify_new_submission(
title: str,
elevator_pitch: str,
submission_type: str,
admin_url,
topic: str,
duration: int,
):
slack.send_message(
[
{
"type": "section",
"text": {
"text": f"New _{submission_type}_ Submission",
"type": "mrkdwn",
},
}
],
[
{
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"*<{admin_url}|{title.capitalize()}>*\n"
f"*Elevator Pitch*\n{elevator_pitch}",
},
"fields": [
{"type": "mrkdwn", "text": "*Topic*"},
{"type": "mrkdwn", "text": "*Duration*"},
{"type": "plain_text", "text": str(duration)},
{"type": "mrkdwn", "text": str(topic)},
],
}
]
}
],
)
|
Fix slack 'invalid attachments' error message
|
Fix slack 'invalid attachments' error message
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
---
+++
@@ -49,7 +49,7 @@
{"type": "mrkdwn", "text": "*Topic*"},
{"type": "mrkdwn", "text": "*Duration*"},
{"type": "plain_text", "text": str(duration)},
- {"type": "mrkdwn", "text": topic},
+ {"type": "mrkdwn", "text": str(topic)},
],
}
]
|
c973068ada6fa5039a289719c852f06fe786c8fa
|
bucketeer/test/test_commit.py
|
bucketeer/test/test_commit.py
|
import unittest
import boto
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket
existing_bucket = 'bucket.exists'
def setUp(self):
# Create a bucket with one file
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
return
def tearDown(self):
# Remove all test-created buckets and files
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
Add create test file dir to setUp and tearDown
|
Add create test file dir to setUp and tearDown
May need to revisit and identify best place to put such a directory.
|
Python
|
mit
|
mgarbacz/bucketeer
|
---
+++
@@ -1,23 +1,30 @@
-import unittest
-import boto
+import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
- global existing_bucket
+ global existing_bucket, test_dir
existing_bucket = 'bucket.exists'
+ test_dir = 'bucketeer_test_dir'
def setUp(self):
- # Create a bucket with one file
+ # Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
+
+ # Create directory to house test files
+ os.makedirs(test_dir)
return
def tearDown(self):
- # Remove all test-created buckets and files
+ # Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
+
+ # Remove directory created to house test files
+ os.rmdir(test_dir)
+
return
def testMain(self):
|
ede7a27ca8862bdd1b9b0b7a113b80d055492ae1
|
debexpo/config/__init__.py
|
debexpo/config/__init__.py
|
import os.path
import pylons
from paste.deploy import appconfig
def easy_app_init(ini_path):
ini_path = os.path.abspath(ini_path)
assert os.path.exists(ini_path)
# Initialize Pylons app
conf = appconfig('config:' + ini_path)
import debexpo.config.environment
pylons.config = debexpo.config.environment.load_environment(conf.global_conf, conf.local_conf)
|
Add a simple app initialization function since paster shell is busted
|
Add a simple app initialization function since paster shell is busted
|
Python
|
mit
|
jonnylamb/debexpo,jadonk/debexpo,jonnylamb/debexpo,jonnylamb/debexpo,swvist/Debexpo,jadonk/debexpo,swvist/Debexpo,swvist/Debexpo,jadonk/debexpo
|
---
+++
@@ -0,0 +1,14 @@
+import os.path
+import pylons
+from paste.deploy import appconfig
+
+
+def easy_app_init(ini_path):
+ ini_path = os.path.abspath(ini_path)
+ assert os.path.exists(ini_path)
+
+ # Initialize Pylons app
+ conf = appconfig('config:' + ini_path)
+ import debexpo.config.environment
+ pylons.config = debexpo.config.environment.load_environment(conf.global_conf, conf.local_conf)
+
|
|
d2ac548441523e2ed4d0ac824e5972ae48be3b19
|
packages/Python/lldbsuite/test/lang/swift/closure_shortcuts/TestClosureShortcuts.py
|
packages/Python/lldbsuite/test/lang/swift/closure_shortcuts/TestClosureShortcuts.py
|
# TestAccelerateSIMD.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest,skipUnlessDarwin])
|
# TestClosureShortcuts.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2018 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
decorators=[swiftTest])
|
Fix typo and run everywhere.
|
Fix typo and run everywhere.
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
---
+++
@@ -1,4 +1,4 @@
-# TestAccelerateSIMD.py
+# TestClosureShortcuts.py
#
# This source file is part of the Swift.org open source project
#
@@ -13,4 +13,4 @@
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(),
- decorators=[swiftTest,skipUnlessDarwin])
+ decorators=[swiftTest])
|
0dcc2a5865ed31618f63e9b152501cf8fbc201ac
|
doorman/main.py
|
doorman/main.py
|
import argparse
import os
from doorman import Doorman
parser = argparse.ArgumentParser(description='Doorman keeps your secret things')
parser.add_argument('-s', '--secret', action="store_true", dest="status", help='Hide all secret things')
parser.add_argument('-u', '--unsecret', action="store_false", dest="status", help='Unhide all secret things')
parser.add_argument('-c', '--config', action="store", dest="config_file", type=file, help='Config file')
args = parser.parse_args()
def main():
doorman = Doorman(args.status, os.path.abspath(args.config_file.name))
doorman.run()
if __name__ == "__main__":
main()
|
import argparse
import os
from doorman import Doorman
DEFAULT_CONFIG_PATH = os.path.join(os.path.expanduser("~"), ".doormanrc")
DEFAULT_CONFIG = """[secrets]
test_secret =
[files]
test_secret =
"""
if not os.path.exists(DEFAULT_CONFIG_PATH):
with open(DEFAULT_CONFIG_PATH, "w") as f:
f.write(DEFAULT_CONFIG)
parser = argparse.ArgumentParser(description='Doorman keeps your secret things')
parser.add_argument('-s', '--secret', action="store_true", dest="status", help='Hide all secret things')
parser.add_argument('-u', '--unsecret', action="store_false", dest="status", help='Unhide all secret things')
parser.add_argument('-c', '--config', action="store", dest="config_file",
default=DEFAULT_CONFIG_PATH, type=file, help='Config file')
args = parser.parse_args()
def main():
"""
Main function
"""
if args.config_file.name is DEFAULT_CONFIG_PATH:
parser.print_help()
else:
doorman = Doorman(args.status, os.path.abspath(args.config_file.name))
doorman.run()
if __name__ == "__main__":
main()
|
Add default parameter and default config
|
Add default parameter and default config
|
Python
|
mit
|
halitalptekin/doorman
|
---
+++
@@ -2,17 +2,36 @@
import os
from doorman import Doorman
+DEFAULT_CONFIG_PATH = os.path.join(os.path.expanduser("~"), ".doormanrc")
+DEFAULT_CONFIG = """[secrets]
+test_secret =
+
+[files]
+test_secret =
+"""
+
+if not os.path.exists(DEFAULT_CONFIG_PATH):
+ with open(DEFAULT_CONFIG_PATH, "w") as f:
+ f.write(DEFAULT_CONFIG)
+
parser = argparse.ArgumentParser(description='Doorman keeps your secret things')
parser.add_argument('-s', '--secret', action="store_true", dest="status", help='Hide all secret things')
parser.add_argument('-u', '--unsecret', action="store_false", dest="status", help='Unhide all secret things')
-parser.add_argument('-c', '--config', action="store", dest="config_file", type=file, help='Config file')
-
+parser.add_argument('-c', '--config', action="store", dest="config_file",
+ default=DEFAULT_CONFIG_PATH, type=file, help='Config file')
args = parser.parse_args()
def main():
- doorman = Doorman(args.status, os.path.abspath(args.config_file.name))
- doorman.run()
+ """
+ Main function
+ """
+ if args.config_file.name is DEFAULT_CONFIG_PATH:
+ parser.print_help()
+ else:
+ doorman = Doorman(args.status, os.path.abspath(args.config_file.name))
+ doorman.run()
+
if __name__ == "__main__":
main()
|
a8ac03e3a556230e9318daee1dd80f77502d76c2
|
examples/threads.py
|
examples/threads.py
|
import guv
guv.monkey_patch()
from guv import gyield, sleep
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
print('t: 1')
gyield()
print('t: 2')
gyield()
print('t: 3')
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
print('m: 1')
gyield()
print('m: 2')
gyield()
print('m: 3')
if __name__ == '__main__':
main()
|
import guv
guv.monkey_patch()
from guv import gyield, patcher
import threading
import greenlet
threading_orig = patcher.original('threading')
greenlet_ids = {}
def check_thread():
current = threading_orig.current_thread()
assert type(current) is threading_orig._MainThread
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
check_thread()
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
print('t: 1')
gyield()
print('t: 2')
gyield()
print('t: 3')
def main():
check_thread()
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
print('m: 1')
gyield()
print('m: 2')
gyield()
print('m: 3')
if __name__ == '__main__':
main()
|
Add check to ensure that we're in the same OS thread
|
Add check to ensure that we're in the same OS thread
|
Python
|
mit
|
veegee/guv,veegee/guv
|
---
+++
@@ -2,11 +2,18 @@
guv.monkey_patch()
-from guv import gyield, sleep
+from guv import gyield, patcher
import threading
import greenlet
+threading_orig = patcher.original('threading')
+
greenlet_ids = {}
+
+
+def check_thread():
+ current = threading_orig.current_thread()
+ assert type(current) is threading_orig._MainThread
def debug(i):
@@ -14,6 +21,7 @@
def f():
+ check_thread()
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
@@ -25,6 +33,7 @@
def main():
+ check_thread()
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
|
7fb46bc6fc2c5783569f869bf4855d1ed3709ccb
|
elmo/moon_tracker/forms.py
|
elmo/moon_tracker/forms.py
|
from django import forms
class BatchMoonScanForm(forms.Form):
data = forms.CharField(
widget=forms.Textarea(attrs={'class':'form-control monospace'}),
)
|
from django import forms
import csv
from io import StringIO
class BatchMoonScanForm(forms.Form):
data = forms.CharField(
widget=forms.Textarea(attrs={'class':'form-control monospace'}),
)
def clean(self):
cleaned_data = super(BatchMoonScanForm, self).clean()
raw = StringIO(cleaned_data['data'])
reader = csv.reader(raw, delimiter='\t')
next(reader)
res = []
for x in reader:
print(x)
if len(x) == 1:
assert(len(x[0]) > 0)
current_moon = 0
current_scan = {}
res.append(current_scan)
else:
assert(len(x[0]) == 0)
moon_id = int(x[6])
ore_id = int(x[3])
percentage = int(round(100 * float(x[2])))
if current_moon == 0:
current_moon = moon_id
else:
assert(moon_id == current_moon)
assert(ore_id not in current_scan)
current_scan[ore_id] = percentage
print(res)
cleaned_data['data'] = res
|
Add clean method for batch submission form.
|
Add clean method for batch submission form.
|
Python
|
mit
|
StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser
|
---
+++
@@ -1,6 +1,45 @@
from django import forms
+
+import csv
+from io import StringIO
class BatchMoonScanForm(forms.Form):
data = forms.CharField(
widget=forms.Textarea(attrs={'class':'form-control monospace'}),
)
+
+ def clean(self):
+ cleaned_data = super(BatchMoonScanForm, self).clean()
+ raw = StringIO(cleaned_data['data'])
+ reader = csv.reader(raw, delimiter='\t')
+
+ next(reader)
+
+ res = []
+
+ for x in reader:
+ print(x)
+ if len(x) == 1:
+ assert(len(x[0]) > 0)
+
+ current_moon = 0
+ current_scan = {}
+ res.append(current_scan)
+ else:
+ assert(len(x[0]) == 0)
+
+ moon_id = int(x[6])
+ ore_id = int(x[3])
+ percentage = int(round(100 * float(x[2])))
+
+ if current_moon == 0:
+ current_moon = moon_id
+ else:
+ assert(moon_id == current_moon)
+
+ assert(ore_id not in current_scan)
+
+ current_scan[ore_id] = percentage
+
+ print(res)
+ cleaned_data['data'] = res
|
f4d7f7207cff82c38d6973dbef717bfc50345b32
|
models.py
|
models.py
|
from django.db import models
class FandomHierarchy(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self')
class Image(models.Model):
pixel_width = models.IntegerField()
pixel_height = models.IntegerField()
name = models.CharField(max_length=100)
fandoms = models.ManyToManyField(FandomHierarchy)
image = models.ImageField(upload_to='source_images')
class Media(models.Model):
visible_width = models.FloatField()
visible_height = models.FloatField()
cost_cents = models.IntegerField()
price_cents = models.IntegerField()
weight_oz = models.FloatField()
exterior_width = models.FloatField()
exterior_height = models.FloatField()
exterior_depth = models.FloatField()
stock_amount = models.IntegerField()
|
from django.db import models
class FandomHierarchy(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self')
def __unicode__(self):
return "Fandom tree node %s" % self.name
class Image(models.Model):
pixel_width = models.IntegerField()
pixel_height = models.IntegerField()
name = models.CharField(max_length=100)
fandoms = models.ManyToManyField(FandomHierarchy)
image = models.ImageField(upload_to='source_images')
def __unicode__(self):
return "Image instance %s (%dx%d)" % (self.name, self.pixel_width, self.pixel_height)
class Media(models.Model):
name = models.CharField(max_length=100)
visible_width = models.FloatField()
visible_height = models.FloatField()
cost_cents = models.IntegerField()
price_cents = models.IntegerField()
weight_oz = models.FloatField()
exterior_width = models.FloatField()
exterior_height = models.FloatField()
exterior_depth = models.FloatField()
stock_amount = models.IntegerField()
def __unicode__(self):
return "Media instance %s, %d in stock" % (self.name, self.stock_amount)
|
Add __unicode__ methods to model
|
Add __unicode__ methods to model
|
Python
|
bsd-3-clause
|
willmurnane/store
|
---
+++
@@ -3,6 +3,8 @@
class FandomHierarchy(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self')
+ def __unicode__(self):
+ return "Fandom tree node %s" % self.name
class Image(models.Model):
pixel_width = models.IntegerField()
@@ -10,8 +12,11 @@
name = models.CharField(max_length=100)
fandoms = models.ManyToManyField(FandomHierarchy)
image = models.ImageField(upload_to='source_images')
+ def __unicode__(self):
+ return "Image instance %s (%dx%d)" % (self.name, self.pixel_width, self.pixel_height)
class Media(models.Model):
+ name = models.CharField(max_length=100)
visible_width = models.FloatField()
visible_height = models.FloatField()
cost_cents = models.IntegerField()
@@ -21,3 +26,5 @@
exterior_height = models.FloatField()
exterior_depth = models.FloatField()
stock_amount = models.IntegerField()
+ def __unicode__(self):
+ return "Media instance %s, %d in stock" % (self.name, self.stock_amount)
|
ce291ba622ae27e0bdb448fee26b37c9af4ffeb0
|
example/urls.py
|
example/urls.py
|
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^comments/', include('fluent_comments.urls')),
url(r'^articles/', include('article.urls')),
url(r'^$', 'django.views.generic.simple.redirect_to', {'url': 'articles/', 'permanent': False}),
)
|
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
from django.views.generic import RedirectView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^comments/', include('fluent_comments.urls')),
url(r'^articles/', include('article.urls')),
url(r'^$', RedirectView.as_view(url='articles/', permanent=False)),
)
|
Fix running the example project with Django 1.5
|
Fix running the example project with Django 1.5
|
Python
|
apache-2.0
|
BangorUniversity/django-fluent-comments,PetrDlouhy/django-fluent-comments,mgpyh/django-fluent-comments,Afnarel/django-fluent-comments,django-fluent/django-fluent-comments,BangorUniversity/django-fluent-comments,PetrDlouhy/django-fluent-comments,BangorUniversity/django-fluent-comments,Afnarel/django-fluent-comments,django-fluent/django-fluent-comments,mgpyh/django-fluent-comments,PetrDlouhy/django-fluent-comments,django-fluent/django-fluent-comments,akszydelko/django-fluent-comments,akszydelko/django-fluent-comments,edoburu/django-fluent-comments,akszydelko/django-fluent-comments,Afnarel/django-fluent-comments,edoburu/django-fluent-comments,django-fluent/django-fluent-comments,edoburu/django-fluent-comments,mgpyh/django-fluent-comments
|
---
+++
@@ -1,5 +1,6 @@
from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
+from django.views.generic import RedirectView
admin.autodiscover()
urlpatterns = patterns('',
@@ -9,5 +10,5 @@
url(r'^comments/', include('fluent_comments.urls')),
url(r'^articles/', include('article.urls')),
- url(r'^$', 'django.views.generic.simple.redirect_to', {'url': 'articles/', 'permanent': False}),
+ url(r'^$', RedirectView.as_view(url='articles/', permanent=False)),
)
|
71cc08178729c81d1af75e3ad5cf637f658e68b1
|
comet/plugins/eventprinter.py
|
comet/plugins/eventprinter.py
|
# Comet VOEvent Broker.
# Example event handler: print an event.
import lxml.etree as ElementTree
from zope.interface import implementer
from twisted.plugin import IPlugin
from comet.icomet import IHandler
# Event handlers must implement IPlugin and IHandler.
@implementer(IPlugin, IHandler)
class EventPrinter(object):
# Simple example of an event handler plugin. This simply prints the
# received event to standard output.
# The name attribute enables the user to specify plugins they want on the
# command line.
name = "print-event"
# When the handler is called, it is passed an instance of
# comet.utility.xml.xml_document.
def __call__(self, event):
"""
Print an event to standard output.
"""
print(ElementTree.tostring(event.element))
# This instance of the handler is what actually constitutes our plugin.
print_event = EventPrinter()
|
# Comet VOEvent Broker.
# Example event handler: print an event.
import lxml.etree as ElementTree
from zope.interface import implementer
from twisted.plugin import IPlugin
from comet.icomet import IHandler
# Event handlers must implement IPlugin and IHandler.
@implementer(IPlugin, IHandler)
class EventPrinter(object):
# Simple example of an event handler plugin. This simply prints the
# received event to standard output.
# The name attribute enables the user to specify plugins they want on the
# command line.
name = "print-event"
# When the handler is called, it is passed an instance of
# comet.utility.xml.xml_document.
def __call__(self, event):
"""
Print an event to standard output.
"""
print(ElementTree.tounicode(event.element))
# This instance of the handler is what actually constitutes our plugin.
print_event = EventPrinter()
|
Convert to unicode for printing.
|
Convert to unicode for printing.
|
Python
|
bsd-2-clause
|
jdswinbank/Comet,jdswinbank/Comet
|
---
+++
@@ -22,7 +22,7 @@
"""
Print an event to standard output.
"""
- print(ElementTree.tostring(event.element))
+ print(ElementTree.tounicode(event.element))
# This instance of the handler is what actually constitutes our plugin.
print_event = EventPrinter()
|
fc2d03b5ec8de233b61994b26d27214ada719d33
|
humanize/__init__.py
|
humanize/__init__.py
|
VERSION = (0,5,1)
from humanize.time import *
from humanize.number import *
from humanize.filesize import *
from humanize.i18n import activate, deactivate
__all__ = ['VERSION', 'naturalday', 'naturaltime', 'ordinal', 'intword',
'naturaldelta', 'intcomma', 'apnumber', 'fractional', 'naturalsize',
'activate', 'deactivate', 'naturaldate']
|
__version__ = VERSION = (0, 5, 1)
from humanize.time import *
from humanize.number import *
from humanize.filesize import *
from humanize.i18n import activate, deactivate
__all__ = ['__version__', 'VERSION', 'naturalday', 'naturaltime', 'ordinal', 'intword',
'naturaldelta', 'intcomma', 'apnumber', 'fractional', 'naturalsize',
'activate', 'deactivate', 'naturaldate']
|
Add common __version__, same as VERSION
|
Add common __version__, same as VERSION
|
Python
|
mit
|
jmoiron/humanize,jmoiron/humanize
|
---
+++
@@ -1,10 +1,10 @@
-VERSION = (0,5,1)
+__version__ = VERSION = (0, 5, 1)
from humanize.time import *
from humanize.number import *
from humanize.filesize import *
from humanize.i18n import activate, deactivate
-__all__ = ['VERSION', 'naturalday', 'naturaltime', 'ordinal', 'intword',
+__all__ = ['__version__', 'VERSION', 'naturalday', 'naturaltime', 'ordinal', 'intword',
'naturaldelta', 'intcomma', 'apnumber', 'fractional', 'naturalsize',
'activate', 'deactivate', 'naturaldate']
|
877a8fc9989644312b18c5eeeb6552f84350c182
|
timed/redmine/admin.py
|
timed/redmine/admin.py
|
from django.contrib import admin
from timed.projects.admin import ProjectAdmin
from timed.projects.models import Project
from timed_adfinis.redmine.models import RedmineProject
admin.site.unregister(Project)
class RedmineProjectInline(admin.StackedInline):
model = RedmineProject
@admin.register(Project)
class ProjectAdmin(ProjectAdmin):
"""Adfinis specific project including Redmine issue configuration."""
inlines = ProjectAdmin.inlines + [RedmineProjectInline, ]
|
from django.contrib import admin
from timed.projects.admin import ProjectAdmin
from timed.projects.models import Project
from timed_adfinis.redmine.models import RedmineProject
from timed_adfinis.subscription.admin import SubscriptionProjectInline
admin.site.unregister(Project)
class RedmineProjectInline(admin.StackedInline):
model = RedmineProject
@admin.register(Project)
class ProjectAdmin(ProjectAdmin):
"""Adfinis specific project including Redmine issue configuration."""
inlines = ProjectAdmin.inlines + [
RedmineProjectInline, SubscriptionProjectInline
]
|
Add support subscriptions for parity with SSA portal
|
Add support subscriptions for parity with SSA portal
These includes:
* customer password
* subscription and packages
* orders
* import from timescout
|
Python
|
agpl-3.0
|
adfinis-sygroup/timed-backend,adfinis-sygroup/timed-backend,adfinis-sygroup/timed-backend
|
---
+++
@@ -3,6 +3,7 @@
from timed.projects.admin import ProjectAdmin
from timed.projects.models import Project
from timed_adfinis.redmine.models import RedmineProject
+from timed_adfinis.subscription.admin import SubscriptionProjectInline
admin.site.unregister(Project)
@@ -15,4 +16,6 @@
class ProjectAdmin(ProjectAdmin):
"""Adfinis specific project including Redmine issue configuration."""
- inlines = ProjectAdmin.inlines + [RedmineProjectInline, ]
+ inlines = ProjectAdmin.inlines + [
+ RedmineProjectInline, SubscriptionProjectInline
+ ]
|
59fef68bee92c45438a87336c92bce031de21139
|
tests/test_utils.py
|
tests/test_utils.py
|
from datetime import timedelta
from jose import utils
class TestUtils:
def test_total_seconds(self):
td = timedelta(seconds=5)
assert utils.timedelta_total_seconds(td) == 5
def test_long_to_base64(self):
assert utils.long_to_base64(0xDEADBEEF) == b'3q2-7w'
|
from datetime import timedelta
from jose import utils
class TestUtils:
def test_total_seconds(self):
td = timedelta(seconds=5)
assert utils.timedelta_total_seconds(td) == 5
def test_long_to_base64(self):
assert utils.long_to_base64(0xDEADBEEF) == b'3q2-7w'
assert utils.long_to_base64(0xCAFED00D, size=10) == b'AAAAAAAAyv7QDQ'
|
Add test for size parameter of long_to_base64.
|
Add test for size parameter of long_to_base64.
|
Python
|
mit
|
mpdavis/python-jose
|
---
+++
@@ -13,3 +13,4 @@
def test_long_to_base64(self):
assert utils.long_to_base64(0xDEADBEEF) == b'3q2-7w'
+ assert utils.long_to_base64(0xCAFED00D, size=10) == b'AAAAAAAAyv7QDQ'
|
554cedb2113f57799f0c62b42d6ff1b317c6100a
|
ingestors/support/image.py
|
ingestors/support/image.py
|
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from PIL import Image
from PIL.Image import DecompressionBombWarning
from ingestors.exc import ProcessingException
class ImageSupport(object):
"""Provides helpers for image extraction."""
def parse_image(self, data):
"""Parse an image file into PIL."""
try:
image = Image.open(StringIO(data))
image.load()
return image
except DecompressionBombWarning as dce:
raise ProcessingException("Image too large: %r" % dce)
except IOError as ioe:
raise ProcessingException("Unknown image format: %r" % ioe)
except (RuntimeError, SyntaxError) as err:
raise ProcessingException("Failed to load image: %r" % err)
|
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from PIL import Image
from PIL.Image import DecompressionBombError as DBE
from PIL.Image import DecompressionBombWarning as DBW
from ingestors.exc import ProcessingException
class ImageSupport(object):
"""Provides helpers for image extraction."""
def parse_image(self, data):
"""Parse an image file into PIL."""
try:
image = Image.open(StringIO(data))
image.load()
return image
except (DBE, DBW) as dce:
raise ProcessingException("Image too large: %r" % dce)
except IOError as ioe:
raise ProcessingException("Unknown image format: %r" % ioe)
except (RuntimeError, SyntaxError) as err:
raise ProcessingException("Failed to load image: %r" % err)
|
Handle decompression bomb errors as well as warnings.
|
Handle decompression bomb errors as well as warnings.
|
Python
|
mit
|
alephdata/ingestors
|
---
+++
@@ -3,7 +3,8 @@
except ImportError:
from StringIO import StringIO
from PIL import Image
-from PIL.Image import DecompressionBombWarning
+from PIL.Image import DecompressionBombError as DBE
+from PIL.Image import DecompressionBombWarning as DBW
from ingestors.exc import ProcessingException
@@ -17,7 +18,7 @@
image = Image.open(StringIO(data))
image.load()
return image
- except DecompressionBombWarning as dce:
+ except (DBE, DBW) as dce:
raise ProcessingException("Image too large: %r" % dce)
except IOError as ioe:
raise ProcessingException("Unknown image format: %r" % ioe)
|
4117b767f48678542797d811cc1a8ea75f37c714
|
saleor/account/migrations/0040_auto_20200415_0443.py
|
saleor/account/migrations/0040_auto_20200415_0443.py
|
# Generated by Django 3.0.5 on 2020-04-15 09:43
from django.db import migrations
def change_extension_permission_to_plugin_permission(apps, schema_editor):
permission = apps.get_model("auth", "Permission")
users = apps.get_model("account", "User")
plugin_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="plugins"
).first()
extension_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="extensions"
).first()
users = users.objects.filter(
user_permissions__content_type__app_label="extensions",
user_permissions__codename="manage_plugins",
)
if not plugin_permission or not extension_permission:
return
for user in users:
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
class Migration(migrations.Migration):
dependencies = [
("account", "0039_auto_20200221_0257"),
("plugins", "0001_initial"),
]
operations = [
migrations.RunPython(change_extension_permission_to_plugin_permission),
]
|
# Generated by Django 3.0.5 on 2020-04-15 09:43
from django.db import migrations
def change_extension_permission_to_plugin_permission(apps, schema_editor):
permission = apps.get_model("auth", "Permission")
users = apps.get_model("account", "User")
plugin_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="plugins"
).first()
extension_permission = permission.objects.filter(
codename="manage_plugins", content_type__app_label="extensions"
).first()
users = users.objects.filter(
user_permissions__content_type__app_label="extensions",
user_permissions__codename="manage_plugins",
)
if not plugin_permission or not extension_permission:
return
for user in users:
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
if extension_permission:
extension_permission.delete()
class Migration(migrations.Migration):
dependencies = [
("account", "0039_auto_20200221_0257"),
("plugins", "0001_initial"),
]
operations = [
migrations.RunPython(change_extension_permission_to_plugin_permission),
]
|
Remove unused permission from db
|
Remove unused permission from db
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
---
+++
@@ -25,6 +25,9 @@
user.user_permissions.remove(extension_permission)
user.user_permissions.add(plugin_permission)
+ if extension_permission:
+ extension_permission.delete()
+
class Migration(migrations.Migration):
|
bf97ea6e188175cb452fd1534356fc205d1434f7
|
hiscore/__init__.py
|
hiscore/__init__.py
|
__author__ = 'Abraham Othman'
__copyright__ = 'Copyright 2014, Abraham Othman'
__version__ = '1.0'
__maintainer__ = 'Abraham Othman'
__email__ = 'aothman@cs.cmu.edu'
from .engine import create, HiScoreEngine, Point
from .errors import MonotoneError, MonotoneBoundsError, ScoreCreationError
|
__author__ = 'Abraham Othman'
__copyright__ = 'Copyright 2014, Abraham Othman'
__version__ = '1.0.0'
__maintainer__ = 'Abraham Othman'
__email__ = 'aothman@cs.cmu.edu'
from .engine import create, HiScoreEngine, Point
from .errors import MonotoneError, MonotoneBoundsError, ScoreCreationError
|
Change versioning to be PEP compatible
|
Change versioning to be PEP compatible
|
Python
|
bsd-3-clause
|
aothman/hiscore
|
---
+++
@@ -1,7 +1,7 @@
__author__ = 'Abraham Othman'
__copyright__ = 'Copyright 2014, Abraham Othman'
-__version__ = '1.0'
+__version__ = '1.0.0'
__maintainer__ = 'Abraham Othman'
__email__ = 'aothman@cs.cmu.edu'
|
080101d59490ca5f5b0b1208a9a11663cdfaf7a7
|
results/views.py
|
results/views.py
|
# Create your views here.
|
# Create your views here.
from django.shortcuts import render_to_response
from django.template import RequestContext
from libs.parser import Parser
import os, settings
from plots.models import Md5Log, BenchmarkLogs, MachineInfo, RtAverage, RtMoss, RtBldg391, RtM35, RtSphflake, RtWorld, RtStar
def show_result(request, filename):
"""
"""
# Parse the file
file = settings.MEDIA_ROOT + filename + '.log'
parser_obj = Parser(file)
parser_obj.run()
data_dict = {}
#Query the database for Benchmark data from benchmark_logs table
file_obj = Md5Log.objects.filter(file_name=filename+'.log')[0]
data_dict['BRLCAD-Version'] = file_obj.benchmark.brlcad_version
data_dict['Running-Time'] = file_obj.benchmark.running_time
data_dict['Time-of-Execution'] = file_obj.benchmark.time_of_execution
data_dict['VGR-Rating'] = file_obj.benchmark.approx_vgr
data_dict['Log-VGR'] = file_obj.benchmark.log_vgr
data_dict['Parameters'] = file_obj.benchmark.params
#Query the database for System Information from machine_info table
data_dict['Clock-Speed'] = file_obj.benchmark.machineinfo.cpu_mhz
data_dict['NCores'] = file_obj.benchmark.machineinfo.cores
data_dict['NProcessors'] = file_obj.benchmark.machineinfo.processors
data_dict['Vendor-ID'] = file_obj.benchmark.machineinfo.vendor_id
data_dict['OS-Type'] = file_obj.benchmark.machineinfo.ostype
data_dict['Processor-Model-Name'] = file_obj.benchmark.machineinfo.model_name
#Query the database for individual Image Performance
data_dict['Rt-Average'] = file_obj.benchmark.rtaverage_set.all()[0].abs_rps
data_dict['Rt-Bldg391'] = file_obj.benchmark.rtbldg391_set.all()[0].abs_rps
data_dict['Rt-M35'] = file_obj.benchmark.rtm35_set.all()[0].abs_rps
data_dict['Rt-Moss'] = file_obj.benchmark.rtmoss_set.all()[0].abs_rps
data_dict['Rt-Sphlake'] = file_obj.benchmark.rtbldg391_set.all()[0].abs_rps
data_dict['Rt-Star'] = file_obj.benchmark.rtstar_set.all()[0].abs_rps
data_dict['Rt-World'] = file_obj.benchmark.rtworld_set.all()[0].abs_rps
return render_to_response('result.html', data_dict, context_instance=RequestContext(request))
|
Add show_result view that extracts data from the database for the file, and sends it on to the result.html template
|
Add show_result view that extracts data from the database for the file, and sends it on to the result.html template
|
Python
|
bsd-2-clause
|
ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark
|
---
+++
@@ -1 +1,46 @@
# Create your views here.
+from django.shortcuts import render_to_response
+from django.template import RequestContext
+from libs.parser import Parser
+import os, settings
+from plots.models import Md5Log, BenchmarkLogs, MachineInfo, RtAverage, RtMoss, RtBldg391, RtM35, RtSphflake, RtWorld, RtStar
+
+def show_result(request, filename):
+ """
+ """
+ # Parse the file
+ file = settings.MEDIA_ROOT + filename + '.log'
+ parser_obj = Parser(file)
+ parser_obj.run()
+
+ data_dict = {}
+ #Query the database for Benchmark data from benchmark_logs table
+ file_obj = Md5Log.objects.filter(file_name=filename+'.log')[0]
+ data_dict['BRLCAD-Version'] = file_obj.benchmark.brlcad_version
+ data_dict['Running-Time'] = file_obj.benchmark.running_time
+ data_dict['Time-of-Execution'] = file_obj.benchmark.time_of_execution
+ data_dict['VGR-Rating'] = file_obj.benchmark.approx_vgr
+ data_dict['Log-VGR'] = file_obj.benchmark.log_vgr
+ data_dict['Parameters'] = file_obj.benchmark.params
+
+ #Query the database for System Information from machine_info table
+ data_dict['Clock-Speed'] = file_obj.benchmark.machineinfo.cpu_mhz
+ data_dict['NCores'] = file_obj.benchmark.machineinfo.cores
+ data_dict['NProcessors'] = file_obj.benchmark.machineinfo.processors
+ data_dict['Vendor-ID'] = file_obj.benchmark.machineinfo.vendor_id
+ data_dict['OS-Type'] = file_obj.benchmark.machineinfo.ostype
+ data_dict['Processor-Model-Name'] = file_obj.benchmark.machineinfo.model_name
+
+ #Query the database for individual Image Performance
+ data_dict['Rt-Average'] = file_obj.benchmark.rtaverage_set.all()[0].abs_rps
+ data_dict['Rt-Bldg391'] = file_obj.benchmark.rtbldg391_set.all()[0].abs_rps
+ data_dict['Rt-M35'] = file_obj.benchmark.rtm35_set.all()[0].abs_rps
+ data_dict['Rt-Moss'] = file_obj.benchmark.rtmoss_set.all()[0].abs_rps
+ data_dict['Rt-Sphlake'] = file_obj.benchmark.rtbldg391_set.all()[0].abs_rps
+ data_dict['Rt-Star'] = file_obj.benchmark.rtstar_set.all()[0].abs_rps
+ data_dict['Rt-World'] = file_obj.benchmark.rtworld_set.all()[0].abs_rps
+
+ return render_to_response('result.html', data_dict, context_instance=RequestContext(request))
+
+
+
|
0827ea6ef23e87461bb936684bc61bcc1cb6b42f
|
spider.py
|
spider.py
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']").extract()
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']").extract()
return dataset
|
Add XPath query to extract dataset name
|
Add XPath query to extract dataset name
|
Python
|
mit
|
MaxLikelihood/CODE
|
---
+++
@@ -15,5 +15,7 @@
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
+ dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']").extract()
-
+
+ return dataset
|
f8c61141e8466a408284f45ca331a2f6d87f9363
|
django_extensions/settings.py
|
django_extensions/settings.py
|
# -*- coding: utf-8 -*-
import os
from django.conf import settings
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
REPLACEMENTS = getattr(settings, 'EXTENSIONS_REPLACEMENTS', {})
DEFAULT_SQLITE_ENGINES = (
'django.db.backends.sqlite3',
'django.db.backends.spatialite',
)
DEFAULT_MYSQL_ENGINES = (
'django.db.backends.mysql',
'django.contrib.gis.db.backends.mysql',
'mysql.connector.django',
)
DEFAULT_POSTGRESQL_ENGINES = (
'django.db.backends.postgresql',
'django.db.backends.postgresql_psycopg2',
'django.db.backends.postgis',
'django.contrib.gis.db.backends.postgis',
'psqlextra.backend',
)
SQLITE_ENGINES = getattr(settings, 'DJANGO_EXTENSIONS_RESET_DB_SQLITE_ENGINES', DEFAULT_SQLITE_ENGINES)
MYSQL_ENGINES = getattr(settings, 'DJANGO_EXTENSIONS_RESET_DB_MYSQL_ENGINES', DEFAULT_MYSQL_ENGINES)
POSTGRESQL_ENGINES = getattr(settings, 'DJANGO_EXTENSIONS_RESET_DB_POSTGRESQL_ENGINES', DEFAULT_POSTGRESQL_ENGINES)
|
# -*- coding: utf-8 -*-
import os
from django.conf import settings
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
REPLACEMENTS = getattr(settings, 'EXTENSIONS_REPLACEMENTS', {})
DEFAULT_SQLITE_ENGINES = (
'django.db.backends.sqlite3',
'django.db.backends.spatialite',
)
DEFAULT_MYSQL_ENGINES = (
'django.db.backends.mysql',
'django.contrib.gis.db.backends.mysql',
'mysql.connector.django',
)
DEFAULT_POSTGRESQL_ENGINES = (
'django.db.backends.postgresql',
'django.db.backends.postgresql_psycopg2',
'django.db.backends.postgis',
'django.contrib.gis.db.backends.postgis',
'psqlextra.backend',
'django_zero_downtime_migrations.backends.postgres',
'django_zero_downtime_migrations.backends.postgis',
)
SQLITE_ENGINES = getattr(settings, 'DJANGO_EXTENSIONS_RESET_DB_SQLITE_ENGINES', DEFAULT_SQLITE_ENGINES)
MYSQL_ENGINES = getattr(settings, 'DJANGO_EXTENSIONS_RESET_DB_MYSQL_ENGINES', DEFAULT_MYSQL_ENGINES)
POSTGRESQL_ENGINES = getattr(settings, 'DJANGO_EXTENSIONS_RESET_DB_POSTGRESQL_ENGINES', DEFAULT_POSTGRESQL_ENGINES)
|
Add django-zero-downtime-migrations Postgres DB engine
|
Add django-zero-downtime-migrations Postgres DB engine
From the repo:
https://github.com/tbicr/django-pg-zero-downtime-migrations
|
Python
|
mit
|
django-extensions/django-extensions,django-extensions/django-extensions,django-extensions/django-extensions
|
---
+++
@@ -21,6 +21,8 @@
'django.db.backends.postgis',
'django.contrib.gis.db.backends.postgis',
'psqlextra.backend',
+ 'django_zero_downtime_migrations.backends.postgres',
+ 'django_zero_downtime_migrations.backends.postgis',
)
SQLITE_ENGINES = getattr(settings, 'DJANGO_EXTENSIONS_RESET_DB_SQLITE_ENGINES', DEFAULT_SQLITE_ENGINES)
|
1cb0b66476f9876f9d59c3c3049e2f00dd8b9dca
|
pycargr/cli.py
|
pycargr/cli.py
|
import csv
from argparse import ArgumentParser
from json import dumps
from pycargr.model import to_dict
from pycargr.parser import parse_car_page
parser = ArgumentParser()
parser.add_argument('car_ids', nargs='+')
parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout')
def main():
args = parser.parse_args()
car_ids = args.car_ids
output = args.output
results = []
for cid in car_ids:
results.append(to_dict(parse_car_page(cid)))
if output == 'csv':
with open('data.csv', 'w') as f:
writer = csv.DictWriter(f, fieldnames=results[0].keys())
writer.writeheader()
for d in results:
# images is a list - not suitable for csv
d.pop('images')
writer.writerow(d)
elif output == 'json' or output == 'stdout':
print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False))
if __name__ == '__main__':
main()
|
import csv
from argparse import ArgumentParser
from json import dumps
from pycargr.model import to_dict
from pycargr.parser import parse_car_page
parser = ArgumentParser(description='CLI interface to interact with car.gr')
parser.add_argument('car_ids', nargs='+')
parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout')
parser.add_argument('--scrape',
help='If set scraps the page again and replace the DB entry. Otherwise atempts to read from already-scraped version',
default=True)
def main():
args = parser.parse_args()
car_ids = args.car_ids
output = args.output
scrape = args.scrape
results = []
for cid in car_ids:
if scrape:
c_data = to_dict(parse_car_page(cid))
else:
# TODO: Fetch from DB
raise NotImplementedError
c_data.pop('html')
results.append(c_data)
if output == 'csv':
with open('data.csv', 'w') as f:
writer = csv.DictWriter(f, fieldnames=results[0].keys())
writer.writeheader()
for d in results:
# images is a list - not suitable for csv
d.pop('images')
d.pop('html')
writer.writerow(d)
elif output == 'json' or output == 'stdout':
print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False))
if __name__ == '__main__':
main()
|
Add scrape flag and pop html from output
|
Add scrape flag and pop html from output
|
Python
|
mit
|
Florents-Tselai/PyCarGr
|
---
+++
@@ -5,19 +5,29 @@
from pycargr.model import to_dict
from pycargr.parser import parse_car_page
-parser = ArgumentParser()
+parser = ArgumentParser(description='CLI interface to interact with car.gr')
parser.add_argument('car_ids', nargs='+')
parser.add_argument('--output', choices=['csv', 'json', 'stdout'], default='stdout')
+parser.add_argument('--scrape',
+ help='If set scraps the page again and replace the DB entry. Otherwise atempts to read from already-scraped version',
+ default=True)
def main():
args = parser.parse_args()
car_ids = args.car_ids
output = args.output
+ scrape = args.scrape
results = []
for cid in car_ids:
- results.append(to_dict(parse_car_page(cid)))
+ if scrape:
+ c_data = to_dict(parse_car_page(cid))
+ else:
+ # TODO: Fetch from DB
+ raise NotImplementedError
+ c_data.pop('html')
+ results.append(c_data)
if output == 'csv':
with open('data.csv', 'w') as f:
@@ -26,6 +36,7 @@
for d in results:
# images is a list - not suitable for csv
d.pop('images')
+ d.pop('html')
writer.writerow(d)
elif output == 'json' or output == 'stdout':
print(dumps(results, sort_keys=True, indent=4, ensure_ascii=False))
|
e58c3de5e4537478299f9c9caff751e908bf569a
|
scripts/shell.py
|
scripts/shell.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Spawns an interactive Python shell.
Usage:
python pwb.py shell [args]
If no arguments are given, the pywikibot library will not be loaded.
The following parameters are supported:
¶ms;
"""
# (C) Pywikibot team, 2014
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
#
def main(*args):
"""Script entry point."""
env = None
if args:
import pywikibot
pywikibot.handle_args(*args)
env = locals()
import code
code.interact("""Welcome to the Pywikibot interactive shell!""", local=env)
if __name__ == "__main__":
import sys
args = []
if set(sys.argv) - set(['shell', 'shell.py']):
args = sys.argv
del sys
main(*args)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Spawns an interactive Python shell.
Usage:
python pwb.py shell [args]
If no arguments are given, the pywikibot library will not be loaded.
The following parameters are supported:
¶ms;
"""
# (C) Pywikibot team, 2014
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
#
def main(*args):
"""Script entry point."""
env = None
if args:
import pywikibot
pywikibot.handle_args(args)
env = locals()
import code
code.interact("""Welcome to the Pywikibot interactive shell!""", local=env)
if __name__ == "__main__":
import sys
args = []
if set(sys.argv) - set(['shell', 'shell.py']):
args = sys.argv
del sys
main(*args)
|
Use args as one parameter
|
[FIX] Shell: Use args as one parameter
Change-Id: If96eb33fb24e0e9c23e4bf9f20a65a48da5fc159
|
Python
|
mit
|
magul/pywikibot-core,hasteur/g13bot_tools_new,xZise/pywikibot-core,jayvdb/pywikibot-core,icyflame/batman,Darkdadaah/pywikibot-core,PersianWikipedia/pywikibot-core,happy5214/pywikibot-core,emijrp/pywikibot-core,npdoty/pywikibot,VcamX/pywikibot-core,h4ck3rm1k3/pywikibot-core,h4ck3rm1k3/pywikibot-core,magul/pywikibot-core,wikimedia/pywikibot-core,happy5214/pywikibot-core,valhallasw/pywikibot-core,TridevGuha/pywikibot-core,darthbhyrava/pywikibot-local,npdoty/pywikibot,Darkdadaah/pywikibot-core,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,jayvdb/pywikibot-core,smalyshev/pywikibot-core,trishnaguha/pywikibot-core,hasteur/g13bot_tools_new
|
---
+++
@@ -26,7 +26,7 @@
env = None
if args:
import pywikibot
- pywikibot.handle_args(*args)
+ pywikibot.handle_args(args)
env = locals()
import code
|
1ea9dd3cf9effc6f65d01a31acdbd39848492223
|
soco/__init__.py
|
soco/__init__.py
|
"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.25.0"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
"""SoCo (Sonos Controller) is a simple library to control Sonos speakers."""
# There is no need for all strings here to be unicode, and Py2 cannot import
# modules with unicode names so do not use from __future__ import
# unicode_literals
# https://github.com/SoCo/SoCo/issues/98
#
import logging
from .core import SoCo
from .discovery import discover
from .exceptions import SoCoException, UnknownSoCoException
# Will be parsed by setup.py to determine package metadata
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
__version__ = "0.26.0-dev"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
# You really should not `import *` - it is poor practice
# but if you do, here is what you get:
__all__ = [
"discover",
"SoCo",
"SoCoException",
"UnknownSoCoException",
]
# http://docs.python.org/2/howto/logging.html#library-config
# Avoids spurious error messages if no logger is configured by the user
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Set up for v0.26.0 release
|
Set up for v0.26.0 release
|
Python
|
mit
|
SoCo/SoCo,SoCo/SoCo
|
---
+++
@@ -17,7 +17,7 @@
__author__ = "The SoCo-Team <python-soco@googlegroups.com>"
# Please increment the version number and add the suffix "-dev" after
# a release, to make it possible to identify in-development code
-__version__ = "0.25.0"
+__version__ = "0.26.0-dev"
__website__ = "https://github.com/SoCo/SoCo"
__license__ = "MIT License"
|
557f4129dc50acddd6c80d0a0679d8c82d5d9215
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Patrik,,,
# Copyright (c) 2015 Patrik,,,
#
# License: MIT
#
"""This module exports the Polylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Polylint(NodeLinter):
"""Provides an interface to polylint."""
syntax = ('html')
cmd = 'polylint --only-inputs -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$'
multiline = True
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = r'\s*/[/*]'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Patrik,,,
# Copyright (c) 2015 Patrik,,,
#
# License: MIT
#
"""This module exports the Polylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Polylint(NodeLinter):
"""Provides an interface to polylint."""
syntax = ('html')
cmd = 'polylint --no-recursion -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+)\r?\n\s*(?P<message>.+)$'
multiline = True
line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = r'\s*/[/*]'
|
Use proper flag for single file error reporting
|
Use proper flag for single file error reporting
"inputs-only" was changed to "no-recursion" before getting merged
|
Python
|
mit
|
nomego/SublimeLinter-contrib-polylint
|
---
+++
@@ -18,7 +18,7 @@
"""Provides an interface to polylint."""
syntax = ('html')
- cmd = 'polylint --only-inputs -i'
+ cmd = 'polylint --no-recursion -i'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
|
53cca5180ec5ad04694ce28d0fc0d945004c33b3
|
src/unifind.py
|
src/unifind.py
|
class UnionFind:
def __init__(self, it=None):
self.uf = {} if it is None else {i : i for i in it}
self.count = len(self.uf)
def __iter__(self):
return iter(self.uf.keys())
def __getitem__(self, key):
return self.uf[key]
def __setitem__(self, key, val):
if key is not val:
raise RuntimeError("key and val must be the same object")
self.uf[key] = key
class QuickFind(UnionFind):
def find(self, key):
return self.uf[key]
def union(self, key1, key2):
u1 = self.find(key1)
u2 = self.find(key2)
if u1 == u2: return
for i in range(len(self.uf)):
if self.uf[i] == u1:
self.uf[i] = u2
self.count -= 1
class QuickUnion(UnionFind):
def find(self, key):
while self.uf[key] != key:
key = self.uf[key]
return key
def union(self, key1, key2):
u1 = self.find(key1)
u2 = self.find(key2)
if u1 == u2: return
self.uf[u1] = u2
self.count -= 1
|
class UnionFind:
def __init__(self, it=None):
self.uf = {} if it is None else {i : i for i in it}
self.count = len(self.uf)
def __iter__(self):
return iter(self.uf.keys())
def __getitem__(self, key):
return self.uf[key]
def __setitem__(self, key, val):
if key is not val:
raise RuntimeError("key and val must be the same object")
self.uf[key] = key
class QuickFind(UnionFind):
def find(self, key):
return self.uf[key]
def union(self, key1, key2):
u1 = self.find(key1)
u2 = self.find(key2)
if u1 == u2: return
for k in self.uf:
if self.uf[k] == u1:
self.uf[k] = u2
self.count -= 1
class QuickUnion(UnionFind):
def find(self, key):
while self.uf[key] != key:
key = self.uf[key]
return key
def union(self, key1, key2):
u1 = self.find(key1)
u2 = self.find(key2)
if u1 == u2: return
self.uf[u1] = u2
self.count -= 1
|
Fix QuickFind: stop relying on keys being integers
|
Fix QuickFind: stop relying on keys being integers
|
Python
|
mit
|
all3fox/algos-py
|
---
+++
@@ -26,9 +26,9 @@
if u1 == u2: return
- for i in range(len(self.uf)):
- if self.uf[i] == u1:
- self.uf[i] = u2
+ for k in self.uf:
+ if self.uf[k] == u1:
+ self.uf[k] = u2
self.count -= 1
|
56cde7a0a93a733e0f2958837f935b1446214168
|
tests/test_artefacts.py
|
tests/test_artefacts.py
|
import pytest
from plumbium import artefacts
def test_NiiGzImage_basename():
img = artefacts.NiiGzImage('foo.nii.gz')
assert img.basename == 'foo'
def test_NiiGzImage_bad_extension():
with pytest.raises(ValueError):
img = artefacts.NiiGzImage('foo.nii.gx')
|
import pytest
from plumbium import artefacts
def test_Artefact_basename():
img = artefacts.Artefact('foo.nii.gz', '.nii.gz')
assert img.basename == 'foo'
def test_Artefact_repr():
img = artefacts.Artefact('foo.nii.gz', '.nii.gz')
assert repr(img) == "Artefact('foo.nii.gz')"
def test_NiiGzImage_bad_extension():
with pytest.raises(ValueError):
img = artefacts.NiiGzImage('foo.nii.gx')
def test_TextFile_bad_extension():
with pytest.raises(ValueError):
img = artefacts.NiiGzImage('foo.txx')
|
Improve test coverage of artefact module
|
Improve test coverage of artefact module
|
Python
|
mit
|
jstutters/Plumbium
|
---
+++
@@ -2,11 +2,21 @@
from plumbium import artefacts
-def test_NiiGzImage_basename():
- img = artefacts.NiiGzImage('foo.nii.gz')
+def test_Artefact_basename():
+ img = artefacts.Artefact('foo.nii.gz', '.nii.gz')
assert img.basename == 'foo'
+
+
+def test_Artefact_repr():
+ img = artefacts.Artefact('foo.nii.gz', '.nii.gz')
+ assert repr(img) == "Artefact('foo.nii.gz')"
def test_NiiGzImage_bad_extension():
with pytest.raises(ValueError):
img = artefacts.NiiGzImage('foo.nii.gx')
+
+
+def test_TextFile_bad_extension():
+ with pytest.raises(ValueError):
+ img = artefacts.NiiGzImage('foo.txx')
|
460bb04533b96e7812964553d9af0c5e25033f21
|
rxet/helper.py
|
rxet/helper.py
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
def unpack_uint32(data, offset):
return unpack("I", data[offset:offset+4])[0]
|
Add unpacking of uint32 at an offset
|
Add unpacking of uint32 at an offset
|
Python
|
mit
|
RenolY2/battalion-tools
|
---
+++
@@ -7,3 +7,6 @@
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
+
+def unpack_uint32(data, offset):
+ return unpack("I", data[offset:offset+4])[0]
|
71100d859689a975c6a9bcb06bd5ec8dedbcc876
|
preferences/views.py
|
preferences/views.py
|
from django.shortcuts import render
from django.views.generic.edit import FormView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.forms import PreferencesForm
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
class UserPreferences(FormView):
template_name = 'preferences/preferences.html'
form_class = PreferencesForm
success_url = '/index/'
def form_valid(self, form):
return super(UserPreferences, self).form_valid(form)
|
from django.shortcuts import render
from django.db import transaction
# from django.views.generic import TemplateView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
from preferences.models import PersonFollow
from opencivicdata.models.people_orgs import Person
class EmailRegistrationView(RegistrationView):
form_class = RegistrationFormUniqueEmail
def user_preferences(request):
user = request.user
senators = Person.objects.filter(memberships__organization__name='Florida Senate')
representitives = Person.objects.filter(memberships__organization__name='Florida House of Representatives')
if request.method == 'POST':
with transaction.atomic():
PersonFollow.objects.filter(user=user).delete()
for senator in request.POST.getlist('senators'):
PersonFollow.objects.create(user=user, person_id=senator)
for representitive in request.POST.getlist('representitives'):
PersonFollow.objects.create(user=user, person_id=representitive)
return render(
request,
'preferences/preferences.html',
{'user': user, 'senators': senators, 'representitives': representitives}
)
|
Make view to handle saving and displaying of preferences form
|
Make view to handle saving and displaying of preferences form
|
Python
|
mit
|
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
|
---
+++
@@ -1,10 +1,14 @@
from django.shortcuts import render
-from django.views.generic.edit import FormView
+from django.db import transaction
+# from django.views.generic import TemplateView
from registration.forms import RegistrationFormUniqueEmail
from registration.backends.default.views import RegistrationView
-from preferences.forms import PreferencesForm
+from preferences.models import PersonFollow
+
+from opencivicdata.models.people_orgs import Person
+
class EmailRegistrationView(RegistrationView):
@@ -12,10 +16,22 @@
form_class = RegistrationFormUniqueEmail
-class UserPreferences(FormView):
- template_name = 'preferences/preferences.html'
- form_class = PreferencesForm
- success_url = '/index/'
+def user_preferences(request):
+ user = request.user
- def form_valid(self, form):
- return super(UserPreferences, self).form_valid(form)
+ senators = Person.objects.filter(memberships__organization__name='Florida Senate')
+ representitives = Person.objects.filter(memberships__organization__name='Florida House of Representatives')
+
+ if request.method == 'POST':
+ with transaction.atomic():
+ PersonFollow.objects.filter(user=user).delete()
+ for senator in request.POST.getlist('senators'):
+ PersonFollow.objects.create(user=user, person_id=senator)
+ for representitive in request.POST.getlist('representitives'):
+ PersonFollow.objects.create(user=user, person_id=representitive)
+
+ return render(
+ request,
+ 'preferences/preferences.html',
+ {'user': user, 'senators': senators, 'representitives': representitives}
+ )
|
05d2230015e085cba408474539f997f7fecd2f91
|
tests/utils.py
|
tests/utils.py
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from os.path import abspath, dirname, join
TEST_DIR = abspath(dirname(__file__))
def load_snippet(file_name):
"""Helper to fetch in the content of a test snippet."""
file_path = join(TEST_DIR, "data/snippets", file_name)
with open(file_path) as file:
return file.read()
def load_article(file_name):
"""Helper to fetch in the content of a test article."""
file_path = join(TEST_DIR, "data/articles", file_name)
with open(file_path) as file:
return file.read()
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from os.path import abspath, dirname, join
TEST_DIR = abspath(dirname(__file__))
def load_snippet(file_name):
"""Helper to fetch in the content of a test snippet."""
file_path = join(TEST_DIR, "data/snippets", file_name)
with open(file_path, "rb") as file:
return file.read()
def load_article(file_name):
"""Helper to fetch in the content of a test article."""
file_path = join(TEST_DIR, "data/articles", file_name)
with open(file_path, "rb") as file:
return file.read()
|
Load articles/snippets as binary strings
|
Load articles/snippets as binary strings
|
Python
|
bsd-2-clause
|
bookieio/breadability,bookieio/breadability
|
---
+++
@@ -12,12 +12,12 @@
def load_snippet(file_name):
"""Helper to fetch in the content of a test snippet."""
file_path = join(TEST_DIR, "data/snippets", file_name)
- with open(file_path) as file:
+ with open(file_path, "rb") as file:
return file.read()
def load_article(file_name):
"""Helper to fetch in the content of a test article."""
file_path = join(TEST_DIR, "data/articles", file_name)
- with open(file_path) as file:
+ with open(file_path, "rb") as file:
return file.read()
|
b7e6e2d6665e5ec81bddcb77b33de6a2d2bc7807
|
users/views.py
|
users/views.py
|
from rest_framework import generics
from django.contrib.auth.models import User
from users.serializers import UserSerializer
class UserList(generics.ListCreateAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
class UserDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
|
from rest_framework import generics, permissions
from django.contrib.auth.models import User
from users.serializers import UserSerializer
class UserList(generics.ListCreateAPIView):
queryset = User.objects.all()
serializer_class = UserSerializer
class UserDetail(generics.RetrieveUpdateDestroyAPIView):
permission_classes = (
permissions.IsAuthenticatedOrReadOnly,
)
queryset = User.objects.all()
serializer_class = UserSerializer
|
Add permission to edit user
|
Add permission to edit user
|
Python
|
mit
|
OscaRoa/api-cats
|
---
+++
@@ -1,4 +1,4 @@
-from rest_framework import generics
+from rest_framework import generics, permissions
from django.contrib.auth.models import User
from users.serializers import UserSerializer
@@ -9,5 +9,8 @@
class UserDetail(generics.RetrieveUpdateDestroyAPIView):
+ permission_classes = (
+ permissions.IsAuthenticatedOrReadOnly,
+ )
queryset = User.objects.all()
serializer_class = UserSerializer
|
3d74cd5a02f1d5aefb98e4ef97a1ef458a6b79ea
|
IPython/zmq/__init__.py
|
IPython/zmq/__init__.py
|
#-----------------------------------------------------------------------------
# Copyright (C) 2010 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING.txt, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Verify zmq version dependency >= 2.1.4
#-----------------------------------------------------------------------------
import warnings
from distutils.version import LooseVersion as V
def check_for_zmq(minimum_version, module='IPython.zmq'):
try:
import zmq
except ImportError:
raise ImportError("%s requires pyzmq >= %s"%(module, minimum_version))
pyzmq_version = zmq.__version__
if V(pyzmq_version) < V(minimum_version):
raise ImportError("%s requires pyzmq >= %s, but you have %s"%(
module, minimum_version, pyzmq_version))
# fix missing DEALER/ROUTER aliases in pyzmq < 2.1.9
if not hasattr(zmq, 'DEALER'):
zmq.DEALER = zmq.XREQ
if not hasattr(zmq, 'ROUTER'):
zmq.ROUTER = zmq.XREP
if V(zmq.zmq_version()) >= V('4.0.0'):
warnings.warn("""libzmq 4 detected.
It is unlikely that IPython's zmq code will work properly.
Please install libzmq stable, which is 2.1.x or 2.2.x""",
RuntimeWarning)
check_for_zmq('2.1.4')
|
#-----------------------------------------------------------------------------
# Copyright (C) 2010 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING.txt, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Verify zmq version dependency >= 2.1.4
#-----------------------------------------------------------------------------
import warnings
from distutils.version import LooseVersion as V
def check_for_zmq(minimum_version, module='IPython.zmq'):
try:
import zmq
except ImportError:
raise ImportError("%s requires pyzmq >= %s"%(module, minimum_version))
pyzmq_version = zmq.__version__
if 'dev' not in pyzmq_version and V(pyzmq_version) < V(minimum_version):
raise ImportError("%s requires pyzmq >= %s, but you have %s"%(
module, minimum_version, pyzmq_version))
# fix missing DEALER/ROUTER aliases in pyzmq < 2.1.9
if not hasattr(zmq, 'DEALER'):
zmq.DEALER = zmq.XREQ
if not hasattr(zmq, 'ROUTER'):
zmq.ROUTER = zmq.XREP
if V(zmq.zmq_version()) >= V('4.0.0'):
warnings.warn("""libzmq 4 detected.
It is unlikely that IPython's zmq code will work properly.
Please install libzmq stable, which is 2.1.x or 2.2.x""",
RuntimeWarning)
check_for_zmq('2.1.4')
|
Check for dev version of zmq per @minrk's request
|
Check for dev version of zmq per @minrk's request
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
---
+++
@@ -20,7 +20,7 @@
pyzmq_version = zmq.__version__
- if V(pyzmq_version) < V(minimum_version):
+ if 'dev' not in pyzmq_version and V(pyzmq_version) < V(minimum_version):
raise ImportError("%s requires pyzmq >= %s, but you have %s"%(
module, minimum_version, pyzmq_version))
|
5cf4cd8b457bf3c6fa7b4f46b1ed4dfd542e5139
|
scripts/lib/fontbuild/decomposeGlyph.py
|
scripts/lib/fontbuild/decomposeGlyph.py
|
def decomposeGlyph(glyph):
"""Moves the components of a glyph to its outline."""
font = glyph.getParent()
for component in glyph.components:
componentGlyph = font[component.baseGlyph]
for contour in componentGlyph:
contour = contour.copy()
contour.move(component.offset)
contour.scale(component.scale)
glyph.appendContour(contour)
glyph.clear(contours=False, anchors=False, guides=False)
|
def decomposeGlyph(glyph):
"""Moves the components of a glyph to its outline."""
font = glyph.getParent()
for component in glyph.components:
componentGlyph = font[component.baseGlyph]
for contour in componentGlyph:
contour = contour.copy()
contour.scale(component.scale)
contour.move(component.offset)
glyph.appendContour(contour)
glyph.clear(contours=False, anchors=False, guides=False)
|
Move after scaling during decomposition.
|
Move after scaling during decomposition.
|
Python
|
apache-2.0
|
supriyantomaftuh/roboto,anthrotype/roboto,urandu/roboto,urandu/roboto,bowlofstew/roboto,googlefonts/roboto,moyogo/roboto,Cartman0/roboto,moyogo/roboto,Cartman0/roboto,supriyantomaftuh/roboto,googlefonts/roboto,anthrotype/roboto,bowlofstew/roboto
|
---
+++
@@ -6,7 +6,7 @@
componentGlyph = font[component.baseGlyph]
for contour in componentGlyph:
contour = contour.copy()
+ contour.scale(component.scale)
contour.move(component.offset)
- contour.scale(component.scale)
glyph.appendContour(contour)
glyph.clear(contours=False, anchors=False, guides=False)
|
95dab3dbd10ed923c3d37d29efda6ab8ee971c61
|
plugin.py
|
plugin.py
|
import pre
import supybot.log as log
import supybot.conf as conf
import supybot.utils as utils
import supybot.world as world
import supybot.ircdb as ircdb
from supybot.commands import *
import supybot.irclib as irclib
import supybot.ircmsgs as ircmsgs
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
class Dupe(callbacks.Plugin):
def _dupe(self, irc, query, limit):
results = pre.dupe(query, limit)
if (results):
irc.reply(format('Got %s.', results))
else:
irc.reply(format('Could not find any results for %s.', name))
def dupe(self, irc, msg, args, text):
"""dupe <search>
Perform a search for dupe releases using Pre.im's Web API
"""
limit = self.registryValue('limit', msg.args[0])
self._dupe(irc, text, limit)
dupe = wrap(dupe, ['text'])
Class = Dupe
|
import pre
import supybot.log as log
import supybot.conf as conf
import supybot.utils as utils
import supybot.world as world
import supybot.ircdb as ircdb
from supybot.commands import *
import supybot.irclib as irclib
import supybot.ircmsgs as ircmsgs
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
class Dupe(callbacks.Plugin):
def _dupe(self, irc, query, limit):
results = pre.dupe(query, limit)
if (results):
irc.reply(format('Got %s.', results.length))
irc.reply(format('Results %s', results), private=True)
else:
irc.reply(format('Could not find any results for %s.', name))
def dupe(self, irc, msg, args, text):
"""dupe <search>
Perform a search for dupe releases using Pre.im's Web API
"""
limit = self.registryValue('limit', msg.args[0])
self._dupe(irc, text, limit)
dupe = wrap(dupe, ['text'])
Class = Dupe
|
Send PM to querying user (i think? bad docs.)
|
Send PM to querying user (i think? bad docs.)
|
Python
|
mit
|
bcowdery/supybot-predb-plugin
|
---
+++
@@ -18,7 +18,8 @@
def _dupe(self, irc, query, limit):
results = pre.dupe(query, limit)
if (results):
- irc.reply(format('Got %s.', results))
+ irc.reply(format('Got %s.', results.length))
+ irc.reply(format('Results %s', results), private=True)
else:
irc.reply(format('Could not find any results for %s.', name))
|
f41e46c0dd0b859cdcc88a2d3ae96fa01864445f
|
plugin.py
|
plugin.py
|
import os
import sublime
def plugin_loaded():
disable_native_php_package_completions()
def disable_native_php_package_completions():
completions_file = os.path.join(
sublime.packages_path(),
'PHP',
'PHP.sublime-completions'
)
if not os.path.isfile(completions_file):
try:
polyfill_makedirs(os.path.dirname(completions_file))
polyfill_writefile(completions_file, '// generated by php-completions (PHP Completions Kit) to disable the native PHP package completions')
except:
pass
def polyfill_writefile(path, content):
with open(path, 'w+', encoding='utf8', newline='') as f:
f.write(str(content))
if 3000 <= int(sublime.version()) < 3088:
# Fixes as best as possible a new file permissions issue
# See https://github.com/titoBouzout/SideBarEnhancements/issues/203
# See https://github.com/SublimeTextIssues/Core/issues/239
oldmask = os.umask(0o000)
if oldmask == 0:
os.chmod(path, 0o644)
os.umask(oldmask)
def polyfill_makedirs(path):
if 3000 <= int(sublime.version()) < 3088:
# Fixes as best as possible a new directory permissions issue
# See https://github.com/titoBouzout/SideBarEnhancements/issues/203
# See https://github.com/SublimeTextIssues/Core/issues/239
oldmask = os.umask(0o000)
if oldmask == 0:
os.makedirs(path, 0o755);
else:
os.makedirs(path);
os.umask(oldmask)
else:
os.makedirs(path)
|
import os
import sublime
def plugin_loaded():
# disable_native_php_package_completions
completions_file = os.path.join(sublime.packages_path(), 'PHP', 'PHP.sublime-completions')
if not os.path.isfile(completions_file):
os.makedirs(os.path.dirname(completions_file))
with open(completions_file, 'w+', encoding='utf8', newline='') as f:
f.write('// generated by php-completions (PHP Completions Kit) to disable the native PHP package completions')
|
Remove fixes for sublime text backwards compatibility
|
Remove fixes for sublime text backwards compatibility
I'm not working around sublime text issues anymore.
|
Python
|
bsd-3-clause
|
gerardroche/sublime-phpck
|
---
+++
@@ -2,49 +2,9 @@
import sublime
def plugin_loaded():
-
- disable_native_php_package_completions()
-
-def disable_native_php_package_completions():
-
- completions_file = os.path.join(
- sublime.packages_path(),
- 'PHP',
- 'PHP.sublime-completions'
- )
-
+ # disable_native_php_package_completions
+ completions_file = os.path.join(sublime.packages_path(), 'PHP', 'PHP.sublime-completions')
if not os.path.isfile(completions_file):
- try:
- polyfill_makedirs(os.path.dirname(completions_file))
- polyfill_writefile(completions_file, '// generated by php-completions (PHP Completions Kit) to disable the native PHP package completions')
- except:
- pass
-
-def polyfill_writefile(path, content):
-
- with open(path, 'w+', encoding='utf8', newline='') as f:
- f.write(str(content))
-
- if 3000 <= int(sublime.version()) < 3088:
- # Fixes as best as possible a new file permissions issue
- # See https://github.com/titoBouzout/SideBarEnhancements/issues/203
- # See https://github.com/SublimeTextIssues/Core/issues/239
- oldmask = os.umask(0o000)
- if oldmask == 0:
- os.chmod(path, 0o644)
- os.umask(oldmask)
-
-def polyfill_makedirs(path):
-
- if 3000 <= int(sublime.version()) < 3088:
- # Fixes as best as possible a new directory permissions issue
- # See https://github.com/titoBouzout/SideBarEnhancements/issues/203
- # See https://github.com/SublimeTextIssues/Core/issues/239
- oldmask = os.umask(0o000)
- if oldmask == 0:
- os.makedirs(path, 0o755);
- else:
- os.makedirs(path);
- os.umask(oldmask)
- else:
- os.makedirs(path)
+ os.makedirs(os.path.dirname(completions_file))
+ with open(completions_file, 'w+', encoding='utf8', newline='') as f:
+ f.write('// generated by php-completions (PHP Completions Kit) to disable the native PHP package completions')
|
ab0e5da8c56817ed80552d94e02b7cec7b9d1a97
|
rmgpy/qm/__init__.py
|
rmgpy/qm/__init__.py
|
import os
if not os.environ.get("RMG_workingDirectory"):
import os.path
message = "Please set your RMG_workingDirectory environment variable.\n" +\
"(eg. export RMG_workingDirectory=%s )" % \
os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
raise Exception(message)
|
import os
if not os.environ.get("RMG_workingDirectory"):
import os.path
message = "Please set your RMG_workingDirectory environment variable.\n" +\
"(eg. export RMG_workingDirectory=%s )" % \
os.path.abspath(os.path.join(os.path.dirname(__file__),'..','..'))
raise Exception(message)
|
Change suggested RMG_workingDirectory path due to qm package move
|
Change suggested RMG_workingDirectory path due to qm package move
|
Python
|
mit
|
comocheng/RMG-Py,pierrelb/RMG-Py,enochd/RMG-Py,KEHANG/RMG-Py,nyee/RMG-Py,enochd/RMG-Py,nyee/RMG-Py,KEHANG/RMG-Py,nickvandewiele/RMG-Py,comocheng/RMG-Py,faribas/RMG-Py,chatelak/RMG-Py,pierrelb/RMG-Py,faribas/RMG-Py,chatelak/RMG-Py,nickvandewiele/RMG-Py
|
---
+++
@@ -3,5 +3,5 @@
import os.path
message = "Please set your RMG_workingDirectory environment variable.\n" +\
"(eg. export RMG_workingDirectory=%s )" % \
- os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
+ os.path.abspath(os.path.join(os.path.dirname(__file__),'..','..'))
raise Exception(message)
|
fe698148b69d90270b309bf63d1289522a120e86
|
salt/grains/extra.py
|
salt/grains/extra.py
|
import os
def shell():
'''
Return the default shell to use on this system
'''
# Provides:
# shell
return {'shell': os.environ.get('SHELL', '/bin/sh'}
|
import os
def shell():
'''
Return the default shell to use on this system
'''
# Provides:
# shell
return {'shell': os.environ.get('SHELL', '/bin/sh')}
|
Add missing parens that was causing an error
|
Add missing parens that was causing an error
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -6,4 +6,4 @@
'''
# Provides:
# shell
- return {'shell': os.environ.get('SHELL', '/bin/sh'}
+ return {'shell': os.environ.get('SHELL', '/bin/sh')}
|
26e26e50ddd8b64fd3206788ef1defbf337698e5
|
app/urls.py
|
app/urls.py
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Define URLs and handlers to server them."""
from tornado.web import url
from handlers import (
DefConfHandler,
JobHandler,
SubscriptionHandler
)
APP_URLS = [
url(r'/api/defconfig(?P<sl>/)?(?P<id>.*)', DefConfHandler, name='defconf'),
url(r'/api/job(?P<sl>/)?(?P<id>.*)', JobHandler, name='job'),
url(r'/api/subscription', SubscriptionHandler, name='subscription')
]
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Define URLs and handlers to server them."""
from tornado.web import url
from handlers import (
DefConfHandler,
JobHandler,
SubscriptionHandler
)
APP_URLS = [
url(r'/api/defconfig(?P<sl>/)?(?P<id>.*)', DefConfHandler, name='defconf'),
url(r'/api/job(?P<sl>/)?(?P<id>.*)', JobHandler, name='job'),
url(
r'/api/subscription(?P<sl>/)?(?P<id>.*)',
SubscriptionHandler,
name='subscription'
)
]
|
Add ID support for subscription/ URLs.
|
Add ID support for subscription/ URLs.
|
Python
|
agpl-3.0
|
joyxu/kernelci-backend,joyxu/kernelci-backend,kernelci/kernelci-backend,joyxu/kernelci-backend,kernelci/kernelci-backend
|
---
+++
@@ -25,5 +25,9 @@
APP_URLS = [
url(r'/api/defconfig(?P<sl>/)?(?P<id>.*)', DefConfHandler, name='defconf'),
url(r'/api/job(?P<sl>/)?(?P<id>.*)', JobHandler, name='job'),
- url(r'/api/subscription', SubscriptionHandler, name='subscription')
+ url(
+ r'/api/subscription(?P<sl>/)?(?P<id>.*)',
+ SubscriptionHandler,
+ name='subscription'
+ )
]
|
a54135deeb90ea8248a26c235843c9b8f16e1372
|
project/urls.py
|
project/urls.py
|
# Django
# Third-Party
from rest_framework.documentation import include_docs_urls
from rest_framework.schemas import get_schema_view
from django.conf import settings
from django.conf.urls import (
include,
url,
)
from django.contrib import admin
from django.http import (
HttpResponse,
HttpResponseRedirect,
)
schema_view = get_schema_view(
title='Barberscore API',
)
urlpatterns = [
url(r'^$', lambda r: HttpResponseRedirect('admin/')),
url(r'^admin/', admin.site.urls),
url(r'^api/', include('api.urls')),
# url(r'^bhs/', include('bhs.urls')),
url(r'^rq/', include('django_rq.urls')),
url(r'^api-auth/', include('rest_framework.urls')),
url(r'^schema/', schema_view),
url(r'^docs/', include_docs_urls(title='Documentation', description='API Documentation')),
url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
# Django
# Third-Party
from rest_framework.documentation import include_docs_urls
from rest_framework.schemas import get_schema_view
from django.conf import settings
from django.urls import (
include,
path,
)
from django.contrib import admin
from django.http import (
HttpResponse,
HttpResponseRedirect,
)
schema_view = get_schema_view(
title='Barberscore API',
)
urlpatterns = [
path('', lambda r: HttpResponseRedirect('admin/')),
path('admin/', admin.site.urls),
path('api/', include('api.urls')),
path('rq/', include('django_rq.urls')),
path('api-auth/', include('rest_framework.urls')),
path('schema/', schema_view),
path('docs/', include_docs_urls(title='Documentation', description='API Documentation')),
path('robots.txt', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
path('__debug__/', include(debug_toolbar.urls)),
]
|
Update to new URL dispatcher syntax
|
Update to new URL dispatcher syntax
|
Python
|
bsd-2-clause
|
dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api
|
---
+++
@@ -4,9 +4,9 @@
from rest_framework.schemas import get_schema_view
from django.conf import settings
-from django.conf.urls import (
+from django.urls import (
include,
- url,
+ path,
)
from django.contrib import admin
from django.http import (
@@ -19,19 +19,18 @@
)
urlpatterns = [
- url(r'^$', lambda r: HttpResponseRedirect('admin/')),
- url(r'^admin/', admin.site.urls),
- url(r'^api/', include('api.urls')),
- # url(r'^bhs/', include('bhs.urls')),
- url(r'^rq/', include('django_rq.urls')),
- url(r'^api-auth/', include('rest_framework.urls')),
- url(r'^schema/', schema_view),
- url(r'^docs/', include_docs_urls(title='Documentation', description='API Documentation')),
- url(r'^robots.txt$', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")),
+ path('', lambda r: HttpResponseRedirect('admin/')),
+ path('admin/', admin.site.urls),
+ path('api/', include('api.urls')),
+ path('rq/', include('django_rq.urls')),
+ path('api-auth/', include('rest_framework.urls')),
+ path('schema/', schema_view),
+ path('docs/', include_docs_urls(title='Documentation', description='API Documentation')),
+ path('robots.txt', lambda r: HttpResponse("User-agent: *\nDisallow: /", content_type="text/plain")),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
- url(r'^__debug__/', include(debug_toolbar.urls)),
+ path('__debug__/', include(debug_toolbar.urls)),
]
|
ebfc7061f7515af3ec8b2f61bdbcf4a26c212095
|
src/clarifai_client.py
|
src/clarifai_client.py
|
from clarifai.rest import ClarifaiApp
class Clarifai():
def __init__(self, api_id, api_secret):
self.api_id = api_id
self.api_secret = api_secret
self.app = ClarifaiApp(self.api_id, self.api_secret)
def test(self):
res = self.app.tag_urls(['https://samples.clarifai.com/metro-north.jpg'])
print(res)
|
from clarifai.rest import ClarifaiApp
class Clarifai():
def __init__(self, api_id, api_secret):
self.api_id = api_id
self.api_secret = api_secret
self.app = ClarifaiApp(self.api_id, self.api_secret)
def test(self):
res = self.app.tag_urls(['https://samples.clarifai.com/metro-north.jpg'])
if res['status']['description'] == 'Ok':
print("COOL!")
concepts = res['outputs'][0]['data']['concepts']
for concept in concepts:
print(concept['name'], concept['value'])
|
Test on clarifai is working and returns the tags
|
Test on clarifai is working and returns the tags
|
Python
|
apache-2.0
|
rlokc/PyCVTagger
|
---
+++
@@ -10,4 +10,8 @@
def test(self):
res = self.app.tag_urls(['https://samples.clarifai.com/metro-north.jpg'])
- print(res)
+ if res['status']['description'] == 'Ok':
+ print("COOL!")
+ concepts = res['outputs'][0]['data']['concepts']
+ for concept in concepts:
+ print(concept['name'], concept['value'])
|
acb8d6e8cff9878a14b157d47072de415d12757b
|
src/endpoints/teams.py
|
src/endpoints/teams.py
|
from src.endpoints.base import Base
class Teams(Base):
endpoint = '/teams'
def create_team(self, options=None):
return self.client.post(
self.endpoint,
options
)
def get_teams(self, query=None, options=None):
query_string = self.build_query(query)
return self.client.get(
self.endpoint + query_string,
options
)
def get_team(self, team_id, options=None):
return self.client.get(
self.endpoint + '/' + team_id,
options
)
def update_team(self, team_id, options=None):
return self.client.put(
self.endpoint + '/' + team_id,
options
)
def delete_team(self, team_id, query=None, options=None):
query_string = self.build_query(query)
return self.client.delete(
self.endpoint + '/' + team_id + query_string,
options
)
def patch_team(self, team_id, options=None):
return self.client.put(
self.endpoint + '/' + team_id + '/patch',
options
)
def get_team_by_name(self, name, options=None):
return self.client.get(
self.endpoint + '/name/' + name,
options
)
def search_teams(self, options=None):
return self.client.post(
self.endpoint + '/search',
options
)
def check_team_exists(self, name, options=None):
return self.client.get(
self.endpoint + '/name/' + name + '/exists',
options
)
|
Add first batch of team endpoints
|
Add first batch of team endpoints
|
Python
|
mit
|
Vaelor/python-mattermost-driver
|
---
+++
@@ -0,0 +1,62 @@
+from src.endpoints.base import Base
+
+
+class Teams(Base):
+ endpoint = '/teams'
+
+ def create_team(self, options=None):
+ return self.client.post(
+ self.endpoint,
+ options
+ )
+
+ def get_teams(self, query=None, options=None):
+ query_string = self.build_query(query)
+
+ return self.client.get(
+ self.endpoint + query_string,
+ options
+ )
+
+ def get_team(self, team_id, options=None):
+ return self.client.get(
+ self.endpoint + '/' + team_id,
+ options
+ )
+
+ def update_team(self, team_id, options=None):
+ return self.client.put(
+ self.endpoint + '/' + team_id,
+ options
+ )
+
+ def delete_team(self, team_id, query=None, options=None):
+ query_string = self.build_query(query)
+ return self.client.delete(
+ self.endpoint + '/' + team_id + query_string,
+ options
+ )
+
+ def patch_team(self, team_id, options=None):
+ return self.client.put(
+ self.endpoint + '/' + team_id + '/patch',
+ options
+ )
+
+ def get_team_by_name(self, name, options=None):
+ return self.client.get(
+ self.endpoint + '/name/' + name,
+ options
+ )
+
+ def search_teams(self, options=None):
+ return self.client.post(
+ self.endpoint + '/search',
+ options
+ )
+
+ def check_team_exists(self, name, options=None):
+ return self.client.get(
+ self.endpoint + '/name/' + name + '/exists',
+ options
+ )
|
|
51222dd65133159c1fe65a51e8f2ce237d40edef
|
geotrek/outdoor/forms.py
|
geotrek/outdoor/forms.py
|
from crispy_forms.layout import Div
from geotrek.common.forms import CommonForm
from geotrek.outdoor.models import Site
class SiteForm(CommonForm):
geomfields = ['geom']
fieldslayout = [
Div(
'structure',
'name',
'description',
'eid',
)
]
class Meta:
fields = ['structure', 'name', 'description', 'geom', 'eid']
model = Site
|
from crispy_forms.layout import Div
from django import forms
from geotrek.common.forms import CommonForm
from geotrek.outdoor.models import Site, Practice, SitePractice
class SiteForm(CommonForm):
practices = forms.ModelMultipleChoiceField(
queryset=Practice.objects.all(),
widget=forms.CheckboxSelectMultiple(),
required=False
)
geomfields = ['geom']
fieldslayout = [
Div(
'structure',
'name',
'description',
'practices',
'eid',
)
]
class Meta:
fields = ['structure', 'name', 'description', 'geom', 'practices', 'eid']
model = Site
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['practices'].initial = self.instance.site_practices.values_list('practice', flat=True)
def save(self, commit=True):
site = super().save(commit=commit)
if commit:
for practice in Practice.objects.all():
if practice in self.cleaned_data['practices']:
SitePractice.objects.get_or_create(site=site, practice=practice)
else:
SitePractice.objects.filter(site=site, practice=practice).delete()
site.save()
return site
|
Add multiple choice of practices to Site form
|
Add multiple choice of practices to Site form
|
Python
|
bsd-2-clause
|
GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
---
+++
@@ -1,9 +1,16 @@
from crispy_forms.layout import Div
+from django import forms
from geotrek.common.forms import CommonForm
-from geotrek.outdoor.models import Site
+from geotrek.outdoor.models import Site, Practice, SitePractice
class SiteForm(CommonForm):
+ practices = forms.ModelMultipleChoiceField(
+ queryset=Practice.objects.all(),
+ widget=forms.CheckboxSelectMultiple(),
+ required=False
+ )
+
geomfields = ['geom']
fieldslayout = [
@@ -11,10 +18,26 @@
'structure',
'name',
'description',
+ 'practices',
'eid',
)
]
class Meta:
- fields = ['structure', 'name', 'description', 'geom', 'eid']
+ fields = ['structure', 'name', 'description', 'geom', 'practices', 'eid']
model = Site
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.fields['practices'].initial = self.instance.site_practices.values_list('practice', flat=True)
+
+ def save(self, commit=True):
+ site = super().save(commit=commit)
+ if commit:
+ for practice in Practice.objects.all():
+ if practice in self.cleaned_data['practices']:
+ SitePractice.objects.get_or_create(site=site, practice=practice)
+ else:
+ SitePractice.objects.filter(site=site, practice=practice).delete()
+ site.save()
+ return site
|
424d8ed4b24afc8fc0c2e8e7fd9212091a5a0fcc
|
reddit_adzerk/adzerkads.py
|
reddit_adzerk/adzerkads.py
|
from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
if adzerk_test_srs and c.site.name in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name),
origin=c.request_origin,
)
self.frame_id = "ad_main"
|
from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
if adzerk_test_srs and c.site.name.lower() in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name),
origin=c.request_origin,
)
self.frame_id = "ad_main"
|
Check for test subreddits case insensitively.
|
Check for test subreddits case insensitively.
|
Python
|
bsd-3-clause
|
madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk
|
---
+++
@@ -9,7 +9,7 @@
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
- if adzerk_test_srs and c.site.name in adzerk_test_srs:
+ if adzerk_test_srs and c.site.name.lower() in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name),
|
e93789084c03b2a566835006d6d5adaee3d4bbe6
|
silk/globals.py
|
silk/globals.py
|
#!/usr/bin/env python
__all__ = []
try:
from silk.webdoc import css, html, node
__all__.extend(('css', 'html', 'node'))
except ImportError:
pass
try:
from silk.webdb import (
AuthenticationError, BoolColumn, Column, DB, DataColumn,
DateTimeColumn, FloatColumn, IntColumn, RecordError, ReferenceColumn,
RowidColumn, SQLSyntaxError, StrColumn, Table, UnknownDriver, connect
)
__all__.extend((
'AuthenticationError', 'BoolColumn', 'Column', 'DB', 'DataColumn',
'DateTimeColumn', 'FloatColumn', 'IntColumn', 'RecordError',
'ReferenceColumn', 'RowidColumn', 'SQLSyntaxError', 'StrColumn',
'Table', 'UnknownDriver', 'connect'
))
except ImportError:
pass
try:
from silk.webreq import (
B64Document, BaseRouter, Document, FormData, HTTP, Header, HeaderList,
PathRouter, Query, Redirect, Response, TextView, URI
)
__all__.extend((
'B64Document', 'BaseRouter', 'Document', 'FormData', 'HTTP', 'Header',
'HeaderList', 'PathRouter', 'Query', 'Redirect', 'Response',
'TextView', 'URI'
))
except ImportError:
pass
|
#!/usr/bin/env python
__all__ = []
try:
from silk.webdoc import css, html, node
__all__ += ['css', 'html', 'node']
except ImportError:
pass
try:
from silk.webdb import (
AuthenticationError, BoolColumn, Column, DB, DataColumn,
DateTimeColumn, FloatColumn, IntColumn, RecordError, ReferenceColumn,
RowidColumn, SQLSyntaxError, StrColumn, Table, UnknownDriver, connect
)
__all__ += [
'AuthenticationError', 'BoolColumn', 'Column', 'DB', 'DataColumn',
'DateTimeColumn', 'FloatColumn', 'IntColumn', 'RecordError',
'ReferenceColumn', 'RowidColumn', 'SQLSyntaxError', 'StrColumn',
'Table', 'UnknownDriver', 'connect'
]
except ImportError:
pass
try:
from silk.webreq import (
B64Document, BaseRouter, Document, FormData, HTTP, Header, HeaderList,
PathRouter, Query, Redirect, Response, TextView, URI
)
__all__ += [
'B64Document', 'BaseRouter', 'Document', 'FormData', 'HTTP', 'Header',
'HeaderList', 'PathRouter', 'Query', 'Redirect', 'Response',
'TextView', 'URI'
]
except ImportError:
pass
|
Use += to modify __all__, to appease flake8
|
Use += to modify __all__, to appease flake8
|
Python
|
bsd-3-clause
|
orbnauticus/silk
|
---
+++
@@ -4,7 +4,7 @@
try:
from silk.webdoc import css, html, node
- __all__.extend(('css', 'html', 'node'))
+ __all__ += ['css', 'html', 'node']
except ImportError:
pass
@@ -14,12 +14,12 @@
DateTimeColumn, FloatColumn, IntColumn, RecordError, ReferenceColumn,
RowidColumn, SQLSyntaxError, StrColumn, Table, UnknownDriver, connect
)
- __all__.extend((
+ __all__ += [
'AuthenticationError', 'BoolColumn', 'Column', 'DB', 'DataColumn',
'DateTimeColumn', 'FloatColumn', 'IntColumn', 'RecordError',
'ReferenceColumn', 'RowidColumn', 'SQLSyntaxError', 'StrColumn',
'Table', 'UnknownDriver', 'connect'
- ))
+ ]
except ImportError:
pass
@@ -29,10 +29,10 @@
PathRouter, Query, Redirect, Response, TextView, URI
)
- __all__.extend((
+ __all__ += [
'B64Document', 'BaseRouter', 'Document', 'FormData', 'HTTP', 'Header',
'HeaderList', 'PathRouter', 'Query', 'Redirect', 'Response',
'TextView', 'URI'
- ))
+ ]
except ImportError:
pass
|
0af35018fbdf2460d8890a7d7b4ad8246a3d121d
|
IPython/testing/__init__.py
|
IPython/testing/__init__.py
|
"""Testing support (tools to test IPython itself).
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2009-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
# User-level entry point for testing
def test(all=False):
"""Run the entire IPython test suite.
For fine-grained control, you should use the :file:`iptest` script supplied
with the IPython installation."""
# Do the import internally, so that this function doesn't increase total
# import time
from .iptestcontroller import run_iptestall, default_options
options = default_options()
options.all = all
run_iptestall(options)
# So nose doesn't try to run this as a test itself and we end up with an
# infinite test loop
test.__test__ = False
|
"""Testing support (tools to test IPython itself).
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2009-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
# User-level entry point for testing
def test(**kwargs):
"""Run the entire IPython test suite.
Any of the options for run_iptestall() may be passed as keyword arguments.
"""
# Do the import internally, so that this function doesn't increase total
# import time
from .iptestcontroller import run_iptestall, default_options
options = default_options()
for name, val in kwargs.items():
setattr(options, name, val)
run_iptestall(options)
# So nose doesn't try to run this as a test itself and we end up with an
# infinite test loop
test.__test__ = False
|
Allow any options to be passed through test function
|
Allow any options to be passed through test function
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
---
+++
@@ -13,17 +13,18 @@
#-----------------------------------------------------------------------------
# User-level entry point for testing
-def test(all=False):
+def test(**kwargs):
"""Run the entire IPython test suite.
- For fine-grained control, you should use the :file:`iptest` script supplied
- with the IPython installation."""
+ Any of the options for run_iptestall() may be passed as keyword arguments.
+ """
# Do the import internally, so that this function doesn't increase total
# import time
from .iptestcontroller import run_iptestall, default_options
options = default_options()
- options.all = all
+ for name, val in kwargs.items():
+ setattr(options, name, val)
run_iptestall(options)
# So nose doesn't try to run this as a test itself and we end up with an
|
5ccadaae69f8011f16f7df7ae5711277931a94a8
|
tests/testall.py
|
tests/testall.py
|
#!/usr/bin/env python
import unittest, os, sys
try:
import coverage
coverage.erase()
coverage.start()
except ImportError:
coverage = None
my_dir = os.path.dirname(sys.argv[0])
if not my_dir:
my_dir = os.getcwd()
sys.argv.append('-v')
suite_names = [f[:-3] for f in os.listdir(my_dir)
if f.startswith('test') and f.endswith('.py')]
suite_names.remove('testall')
suite_names.sort()
alltests = unittest.TestSuite()
for name in suite_names:
m = __import__(name, globals(), locals(), [])
alltests.addTest(m.suite)
a = unittest.TextTestRunner(verbosity=2).run(alltests)
if coverage:
coverage.stop()
else:
print "Coverage module not found. Skipping coverage report."
print "\nResult", a
if not a.wasSuccessful():
sys.exit(1)
if coverage:
all_sources = []
def incl(d):
for x in os.listdir(d):
if x.endswith('.py'):
all_sources.append(os.path.join(d, x))
incl('..')
coverage.report(all_sources + ['../0publish'])
|
#!/usr/bin/env python
import unittest, os, sys
for x in ['LANGUAGE', 'LANG']:
if x in os.environ:
del os.environ[x]
try:
import coverage
coverage.erase()
coverage.start()
except ImportError:
coverage = None
my_dir = os.path.dirname(sys.argv[0])
if not my_dir:
my_dir = os.getcwd()
sys.argv.append('-v')
suite_names = [f[:-3] for f in os.listdir(my_dir)
if f.startswith('test') and f.endswith('.py')]
suite_names.remove('testall')
suite_names.sort()
alltests = unittest.TestSuite()
for name in suite_names:
m = __import__(name, globals(), locals(), [])
alltests.addTest(m.suite)
a = unittest.TextTestRunner(verbosity=2).run(alltests)
if coverage:
coverage.stop()
else:
print "Coverage module not found. Skipping coverage report."
print "\nResult", a
if not a.wasSuccessful():
sys.exit(1)
if coverage:
all_sources = []
def incl(d):
for x in os.listdir(d):
if x.endswith('.py'):
all_sources.append(os.path.join(d, x))
incl('..')
coverage.report(all_sources + ['../0publish'])
|
Clear $LANGUAGE before running tests
|
Clear $LANGUAGE before running tests
|
Python
|
lgpl-2.1
|
timdiels/0publish
|
---
+++
@@ -1,5 +1,8 @@
#!/usr/bin/env python
import unittest, os, sys
+for x in ['LANGUAGE', 'LANG']:
+ if x in os.environ:
+ del os.environ[x]
try:
import coverage
coverage.erase()
|
76f6497389d2e6588d91fbd7c24d2f368592140b
|
tests/utils.py
|
tests/utils.py
|
import bottle
import threading
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
_time.sleep(0.1)
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
threading.Thread.__init__(self)
self.app = app
self.port = port
self.server_kwargs = server_kwargs
def run(self):
bottle.run(self.app, host='localhost', port=self.port, **self.server_kwargs)
# http://code.activestate.com/recipes/106033-deep-list-to-convert-a-nested-tuple-of-tuples/
def listit(t):
return list(map(listit, t)) if isinstance(t, (list, tuple)) else t
|
import bottle
import threading
import socket
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
ok = False
for i in range(10):
try:
conn = socket.create_connection(('127.0.0.1', port), 0.1)
ok = True
break
except socket.error as e:
_time.sleep(0.1)
if not ok:
import warnings
warnings.warn('Server did not start after 1 second')
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
threading.Thread.__init__(self)
self.app = app
self.port = port
self.server_kwargs = server_kwargs
def run(self):
bottle.run(self.app, host='localhost', port=self.port, **self.server_kwargs)
# http://code.activestate.com/recipes/106033-deep-list-to-convert-a-nested-tuple-of-tuples/
def listit(t):
return list(map(listit, t)) if isinstance(t, (list, tuple)) else t
|
Test server existence via a socket connection
|
Test server existence via a socket connection
|
Python
|
bsd-2-clause
|
p/webracer
|
---
+++
@@ -1,12 +1,24 @@
import bottle
import threading
+import socket
import time as _time
def start_bottle_server(app, port, **kwargs):
server_thread = ServerThread(app, port, kwargs)
server_thread.daemon = True
server_thread.start()
- _time.sleep(0.1)
+
+ ok = False
+ for i in range(10):
+ try:
+ conn = socket.create_connection(('127.0.0.1', port), 0.1)
+ ok = True
+ break
+ except socket.error as e:
+ _time.sleep(0.1)
+ if not ok:
+ import warnings
+ warnings.warn('Server did not start after 1 second')
class ServerThread(threading.Thread):
def __init__(self, app, port, server_kwargs):
|
dc786699618e6ebc1206080d9c0fdb697d519668
|
pydy/viz/server.py
|
pydy/viz/server.py
|
#!/usr/bin/env python
import os
import webbrowser
import BaseHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
__all__ = ['run_server']
def run_server(port=8000,scene_file="Null"):
#change dir to static first.
os.chdir("static/")
HandlerClass = SimpleHTTPRequestHandler
ServerClass = BaseHTTPServer.HTTPServer
Protocol = "HTTP/1.0"
server_address = ('127.0.0.1', port)
HandlerClass.protocol_version = Protocol
httpd = ServerClass(server_address, HandlerClass)
sa = httpd.socket.getsockname()
print("Serving HTTP on", sa[0], "port", sa[1], "...")
print("hit ctrl+c to stop the server..")
print("To view visualization, open:\n")
url = "http://localhost:"+ str(sa[1]) + "/index.html?load=" + scene_file
print(url)
webbrowser.open(url)
httpd.serve_forever()
if __name__ == "__main__":
run_server()
|
#!/usr/bin/env python
import os
import sys
import webbrowser
if sys.version_info < (3, 0):
from SimpleHTTPServer import SimpleHTTPRequestHandler
from BaseHTTPServer import HTTPServer
else:
from http.server import SimpleHTTPRequestHandler
from http.server import HTTPServer
__all__ = ['run_server']
def run_server(port=8000,scene_file="Null"):
#change dir to static first.
os.chdir("static/")
HandlerClass = SimpleHTTPRequestHandler
ServerClass = HTTPServer
Protocol = "HTTP/1.0"
server_address = ('127.0.0.1', port)
HandlerClass.protocol_version = Protocol
httpd = ServerClass(server_address, HandlerClass)
sa = httpd.socket.getsockname()
print("Serving HTTP on", sa[0], "port", sa[1], "...")
print("hit ctrl+c to stop the server..")
print("To view visualization, open:\n")
url = "http://localhost:"+ str(sa[1]) + "/index.html?load=" + scene_file
print(url)
webbrowser.open(url)
httpd.serve_forever()
if __name__ == "__main__":
run_server()
|
Fix HTTPServer imports with Python 3
|
Fix HTTPServer imports with Python 3
|
Python
|
bsd-3-clause
|
Shekharrajak/pydy,Shekharrajak/pydy,skidzo/pydy,skidzo/pydy,oliverlee/pydy,Shekharrajak/pydy,oliverlee/pydy,skidzo/pydy,skidzo/pydy,Shekharrajak/pydy,oliverlee/pydy
|
---
+++
@@ -1,9 +1,15 @@
#!/usr/bin/env python
import os
+import sys
import webbrowser
-import BaseHTTPServer
-from SimpleHTTPServer import SimpleHTTPRequestHandler
+if sys.version_info < (3, 0):
+ from SimpleHTTPServer import SimpleHTTPRequestHandler
+ from BaseHTTPServer import HTTPServer
+else:
+ from http.server import SimpleHTTPRequestHandler
+ from http.server import HTTPServer
+
__all__ = ['run_server']
@@ -12,7 +18,7 @@
#change dir to static first.
os.chdir("static/")
HandlerClass = SimpleHTTPRequestHandler
- ServerClass = BaseHTTPServer.HTTPServer
+ ServerClass = HTTPServer
Protocol = "HTTP/1.0"
server_address = ('127.0.0.1', port)
HandlerClass.protocol_version = Protocol
|
ac403cc68eac59c918ce83d48dc1beb98ddd3484
|
pystil/__init__.py
|
pystil/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 by Florian Mounier, Kozea
# This file is part of pystil, licensed under a 3-clause BSD license.
"""
pystil - An elegant site web traffic analyzer
"""
from flask import Flask
from logging import getLogger, INFO
from pystil.log import get_default_handler
from pystil.routes import register_common_routes
from pystil.routes.data import register_data_routes
def app():
"""Create Flask app"""
app = Flask(__name__)
handler = get_default_handler()
getLogger('werkzeug').addHandler(handler)
getLogger('werkzeug').setLevel(INFO)
app.logger.handlers = []
app.logger.addHandler(handler)
register_common_routes(app)
register_data_routes(app)
return app
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 by Florian Mounier, Kozea
# This file is part of pystil, licensed under a 3-clause BSD license.
"""
pystil - An elegant site web traffic analyzer
"""
from flask import Flask
from logging import getLogger, INFO
from pystil.log import get_default_handler
from pystil.routes import register_common_routes
from pystil.routes.data import register_data_routes
import pystil
import os
def app():
"""Create Flask app"""
root = os.path.dirname(pystil.__file__)
static_folder = os.path.join(root, 'static')
template_folder = os.path.join(root, 'templates')
app = Flask(__name__,
static_folder=static_folder, template_folder=template_folder)
handler = get_default_handler()
getLogger('werkzeug').addHandler(handler)
getLogger('werkzeug').setLevel(INFO)
app.logger.handlers = []
app.logger.addHandler(handler)
register_common_routes(app)
register_data_routes(app)
return app
|
Use __file__ for static and templates
|
Use __file__ for static and templates
|
Python
|
bsd-3-clause
|
Kozea/pystil,Kozea/pystil,Kozea/pystil,Kozea/pystil,Kozea/pystil
|
---
+++
@@ -10,11 +10,17 @@
from pystil.log import get_default_handler
from pystil.routes import register_common_routes
from pystil.routes.data import register_data_routes
+import pystil
+import os
def app():
"""Create Flask app"""
- app = Flask(__name__)
+ root = os.path.dirname(pystil.__file__)
+ static_folder = os.path.join(root, 'static')
+ template_folder = os.path.join(root, 'templates')
+ app = Flask(__name__,
+ static_folder=static_folder, template_folder=template_folder)
handler = get_default_handler()
getLogger('werkzeug').addHandler(handler)
getLogger('werkzeug').setLevel(INFO)
|
a973f489d265c0f4f5baa3357eaf5a772a825821
|
tweetstream.py
|
tweetstream.py
|
# -*- coding: utf-8 -*-
import os
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy import API
from tweepy.streaming import StreamListener
from listener import Listener
ckey = os.environ['CKEY']
consumer_secret = os.environ['CONSUMER_KEY']
access_token_key = os.environ['ACCESS_TOKEN_KEY']
access_token_secret = os.environ['ACCESS_TOKEN_SECRET']
keywords = [
u"كيماوي",
u"غاز سام",
u"كلور",
u"اختناق",
u"سام",
u"غازات سامة",
u"الكلور",
u"الكيماوي",
u"الاختناق",
u"الغازات السامة",
u"السام"
]
def call():
auth = OAuthHandler(ckey, consumer_secret)
auth.set_access_token(access_token_key, access_token_secret)
print "Connecting to Twitter Streaming API..."
api = API(auth)
print "Done."
# initialize Stream object
twitterStream = Stream(auth, Listener(api))
# call filter on Stream object
twitterStream.filter(track=keywords, languages=["ar"])
|
# -*- coding: utf-8 -*-
import os
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy import API
from tweepy.streaming import StreamListener
from listener import Listener
ckey = os.environ['CKEY']
consumer_secret = os.environ['CONSUMER_SECRET']
access_token_key = os.environ['ACCESS_TOKEN_KEY']
access_token_secret = os.environ['ACCESS_TOKEN_SECRET']
keywords = [
u"كيماوي",
u"غاز سام",
u"كلور",
u"اختناق",
u"سام",
u"غازات سامة",
u"الكلور",
u"الكيماوي",
u"الاختناق",
u"الغازات السامة",
u"السام"
]
def call():
auth = OAuthHandler(ckey, consumer_secret)
auth.set_access_token(access_token_key, access_token_secret)
print "Connecting to Twitter Streaming API..."
api = API(auth)
print "Done."
# initialize Stream object
twitterStream = Stream(auth, Listener(api))
# call filter on Stream object
twitterStream.filter(track=keywords, languages=["ar"])
|
Use the correct env variable name
|
Use the correct env variable name
|
Python
|
mit
|
robot-overlord/syriarightnow
|
---
+++
@@ -7,7 +7,7 @@
from listener import Listener
ckey = os.environ['CKEY']
-consumer_secret = os.environ['CONSUMER_KEY']
+consumer_secret = os.environ['CONSUMER_SECRET']
access_token_key = os.environ['ACCESS_TOKEN_KEY']
access_token_secret = os.environ['ACCESS_TOKEN_SECRET']
|
a4efaef6474592229156d7c434679f98d0b5cbea
|
tests/test_R_transformer.py
|
tests/test_R_transformer.py
|
import pytest
from spec2scl.transformers.R import RTransformer
from transformer_test_case import TransformerTestCase
class TestRTransformer(TransformerTestCase):
def setup_method(self, method):
self.t = RTransformer('', {})
@pytest.mark.parametrize(('spec'), [
('"%{bindir}/R foo" stays'),
])
def test_ruby_specific_commands_not_matching(self, spec):
patterns = self.t.handle_R_specific_commands.matches
assert self.get_pattern_for_spec(patterns, spec) == None
@pytest.mark.parametrize(('spec', 'expected'), [
('R CMD foo bar', '%{?scl:scl enable %{scl} "}\nR CMD foo bar%{?scl:"}\n'),
('%{bindir}/R CMD foo bar\n', '%{?scl:scl enable %{scl} "}\n%{bindir}/R CMD foo bar\n%{?scl:"}\n'),
])
def test_ruby_specific_commands_matching(self, spec, expected):
patterns = self.t.handle_R_specific_commands.matches
assert self.t.handle_R_specific_commands(self.get_pattern_for_spec(patterns, spec), spec) == expected
|
import pytest
from spec2scl.transformers.R import RTransformer
from transformer_test_case import TransformerTestCase
class TestRTransformer(TransformerTestCase):
def setup_method(self, method):
self.t = RTransformer('', {})
@pytest.mark.parametrize(('spec'), [
('"%{bindir}/R foo" stays'),
])
def test_R_specific_commands_not_matching(self, spec):
patterns = self.t.handle_R_specific_commands.matches
assert self.get_pattern_for_spec(patterns, spec) == None
@pytest.mark.parametrize(('spec', 'expected'), [
('R CMD foo bar', '%{?scl:scl enable %{scl} "}\nR CMD foo bar%{?scl:"}\n'),
('%{bindir}/R CMD foo bar\n', '%{?scl:scl enable %{scl} "}\n%{bindir}/R CMD foo bar\n%{?scl:"}\n'),
])
def test_R_specific_commands_matching(self, spec, expected):
patterns = self.t.handle_R_specific_commands.matches
assert self.t.handle_R_specific_commands(self.get_pattern_for_spec(patterns, spec), spec) == expected
|
Fix the test method names :)
|
Fix the test method names :)
|
Python
|
mit
|
mbooth101/spec2scl,sclorg/spec2scl
|
---
+++
@@ -11,7 +11,7 @@
@pytest.mark.parametrize(('spec'), [
('"%{bindir}/R foo" stays'),
])
- def test_ruby_specific_commands_not_matching(self, spec):
+ def test_R_specific_commands_not_matching(self, spec):
patterns = self.t.handle_R_specific_commands.matches
assert self.get_pattern_for_spec(patterns, spec) == None
@@ -19,6 +19,6 @@
('R CMD foo bar', '%{?scl:scl enable %{scl} "}\nR CMD foo bar%{?scl:"}\n'),
('%{bindir}/R CMD foo bar\n', '%{?scl:scl enable %{scl} "}\n%{bindir}/R CMD foo bar\n%{?scl:"}\n'),
])
- def test_ruby_specific_commands_matching(self, spec, expected):
+ def test_R_specific_commands_matching(self, spec, expected):
patterns = self.t.handle_R_specific_commands.matches
assert self.t.handle_R_specific_commands(self.get_pattern_for_spec(patterns, spec), spec) == expected
|
a8bf783c99691d12acc9298a67b09a857bed755e
|
tests/test_oai_harvester.py
|
tests/test_oai_harvester.py
|
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.linter import RawDocument
from .utils import TEST_OAI_DOC
class TestHarvester(OAIHarvester):
base_url = ''
long_name = 'Test'
short_name = 'test'
url = 'test'
property_list = ['type', 'source', 'publisher', 'format', 'date']
def harvest(self, days_back=1):
return [RawDocument({
'doc': str(TEST_OAI_DOC),
'source': 'TEST',
'filetype': 'XML',
'docID': "1"
}) for _ in xrange(days_back)]
class TestOAIHarvester(object):
def setup_method(self, method):
self.harvester = TestHarvester()
def test_normalize(self):
results = [
self.harvester.normalize(record) for record in self.harvester.harvest()
]
for res in results:
assert res['title'] == 'Test'
|
from __future__ import unicode_literals
import httpretty
from scrapi.base import OAIHarvester
from scrapi.linter import RawDocument
from .utils import TEST_OAI_DOC
class TestHarvester(OAIHarvester):
base_url = ''
long_name = 'Test'
short_name = 'test'
url = 'test'
property_list = ['type', 'source', 'publisher', 'format', 'date']
@httpretty.activate
def harvest(self, days_back=1):
start_date = '2015-03-14'
end_date = '2015-03-16'
request_url = 'http://validAI.edu/?from={}&to={}'.format(start_date, end_date)
httpretty.register_uri(httpretty.GET, request_url,
body=TEST_OAI_DOC,
content_type="application/XML")
records = self.get_records(request_url, start_date, end_date)
return [RawDocument({
'doc': str(TEST_OAI_DOC),
'source': 'TEST',
'filetype': 'XML',
'docID': "1"
}) for record in records]
class TestOAIHarvester(object):
def setup_method(self, method):
self.harvester = TestHarvester()
def test_normalize(self):
results = [
self.harvester.normalize(record) for record in self.harvester.harvest()
]
for res in results:
assert res['title'] == 'Test'
|
Add more robust test harvester harvest method with mocking
|
Add more robust test harvester harvest method with mocking
|
Python
|
apache-2.0
|
icereval/scrapi,mehanig/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,felliott/scrapi,alexgarciac/scrapi,ostwald/scrapi,jeffreyliu3230/scrapi,erinspace/scrapi,mehanig/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi
|
---
+++
@@ -1,4 +1,6 @@
from __future__ import unicode_literals
+
+import httpretty
from scrapi.base import OAIHarvester
from scrapi.linter import RawDocument
@@ -13,13 +15,26 @@
url = 'test'
property_list = ['type', 'source', 'publisher', 'format', 'date']
+ @httpretty.activate
def harvest(self, days_back=1):
+
+ start_date = '2015-03-14'
+ end_date = '2015-03-16'
+
+ request_url = 'http://validAI.edu/?from={}&to={}'.format(start_date, end_date)
+
+ httpretty.register_uri(httpretty.GET, request_url,
+ body=TEST_OAI_DOC,
+ content_type="application/XML")
+
+ records = self.get_records(request_url, start_date, end_date)
+
return [RawDocument({
'doc': str(TEST_OAI_DOC),
'source': 'TEST',
'filetype': 'XML',
'docID': "1"
- }) for _ in xrange(days_back)]
+ }) for record in records]
class TestOAIHarvester(object):
|
d446a634cf2d903b8f7a7964210017065ffb9b9a
|
tests/test_xorshift_rand.py
|
tests/test_xorshift_rand.py
|
# Copyright 2014 Anonymous7 from Reddit, Julian Andrews
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from __future__ import absolute_import, division
import collections
import unittest
from eval7 import xorshift_rand
class XorshiftRandTestCase(unittest.TestCase):
SAMPLE_COUNT = 1000000
BINS = 1000
DELTA = 125
def check_uniform(self, counter):
expected_count = self.SAMPLE_COUNT / self.BINS
self.assertEqual(set(range(self.BINS)), set(counter.keys()))
for count in counter.values():
self.assertAlmostEqual(count, expected_count, delta=self.DELTA)
def test_random_is_uniform(self):
sample = (xorshift_rand.random() for i in range(self.SAMPLE_COUNT))
counter = collections.Counter(int(num * self.BINS) for num in sample)
self.check_uniform(counter)
def test_randint_is_uniform(self):
sample = (
xorshift_rand.randint(self.BINS) for i in range(self.SAMPLE_COUNT)
)
self.check_uniform(collections.Counter(sample))
|
# Copyright 2014 Anonymous7 from Reddit, Julian Andrews
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
from __future__ import absolute_import, division
import collections
import unittest
from eval7 import xorshift_rand
class XorshiftRandTestCase(unittest.TestCase):
SAMPLE_COUNT = 1000000
BINS = 1000
DELTA = 150 # This should give < 1% chance of failure per test
def check_uniform(self, counter):
expected_count = self.SAMPLE_COUNT / self.BINS
self.assertEqual(set(range(self.BINS)), set(counter.keys()))
for count in counter.values():
self.assertAlmostEqual(count, expected_count, delta=self.DELTA)
def test_random_is_uniform(self):
sample = (xorshift_rand.random() for i in range(self.SAMPLE_COUNT))
counter = collections.Counter(int(num * self.BINS) for num in sample)
self.check_uniform(counter)
def test_randint_is_uniform(self):
sample = (
xorshift_rand.randint(self.BINS) for i in range(self.SAMPLE_COUNT)
)
self.check_uniform(collections.Counter(sample))
|
Increase delta to reduce chance of random failure
|
Increase delta to reduce chance of random failure
Exact chance of failure is more trouble to calculate than I care to go
to, but the the old chance of failure was likely somewhere in the 1-10%
range. The purpose of the test is to detect meaningful non-uniformity,
so false positives should be kept rare.
|
Python
|
mit
|
JulianAndrews/pyeval7,JulianAndrews/pyeval7
|
---
+++
@@ -14,7 +14,7 @@
class XorshiftRandTestCase(unittest.TestCase):
SAMPLE_COUNT = 1000000
BINS = 1000
- DELTA = 125
+ DELTA = 150 # This should give < 1% chance of failure per test
def check_uniform(self, counter):
expected_count = self.SAMPLE_COUNT / self.BINS
|
b7433c0d18a01a9e1340123f7c0423d1fdec04a3
|
sphinxdoc/urls.py
|
sphinxdoc/urls.py
|
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
# These URLs have to be without the / at the end so that relative links in
# static HTML files work correctly and that browsers know how to name files
# for download
url(
r'^(?P<slug>[\w-]+)/(?P<type_>_images|_static|_downloads|_source)/' + \
r'(?P<path>.+)$',
'sphinx_serve',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic.list import ListView
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
ListView.as_view(queryset=models.Project.objects.all().order_by('name'))
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
# These URLs have to be without the / at the end so that relative links in
# static HTML files work correctly and that browsers know how to name files
# for download
url(
r'^(?P<slug>[\w-]+)/(?P<type_>_images|_static|_downloads|_source)/' + \
r'(?P<path>.+)$',
'sphinx_serve',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
Change function-based generic view to class-based.
|
Change function-based generic view to class-based.
As per their deprecation policy, Django 1.5 removed function-based
generic views.
|
Python
|
mit
|
kamni/django-sphinxdoc
|
---
+++
@@ -4,22 +4,16 @@
"""
from django.conf.urls.defaults import patterns, url
-from django.views.generic import list_detail
+from django.views.generic.list import ListView
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
-project_info = {
- 'queryset': models.Project.objects.all().order_by('name'),
- 'template_object_name': 'project',
-}
-
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
- list_detail.object_list,
- project_info,
+ ListView.as_view(queryset=models.Project.objects.all().order_by('name'))
),
url(
r'^(?P<slug>[\w-]+)/search/$',
|
807c02f081dff8945f18f6017fa4bf1769c97513
|
spiral-maker01.py
|
spiral-maker01.py
|
import turtle
import random
from time import sleep
turtle1 = turtle.Turtle()
wn = turtle.Screen()
wn.bgcolor("lightgreen")
colors = ("blue","red","green","pink","gray")
for s in range(5):
x = 10
c = random.randint(0,len(colors)-1)
turtle1.color(colors[c])
x_pos = random.randint(-300,300)
y_pos = random.randint(-300,300)
turtle1.pu()
turtle1.setpos(x_pos,y_pos)
turtle1.pd()
for d in range(30):
turtle1.left(137.5)
turtle1.forward(float(x))
x = x + 5
wn.exitonclick()
|
# Draw five spirals at random positions, each in a randomly chosen color.
import turtle
import random
from time import sleep

turtle1 = turtle.Turtle()
wn = turtle.Screen()
wn.bgcolor("lightgreen")

# Yellow replaced pink here: better contrast against the light-green background.
colors = ("blue","red","green","yellow","gray")

for s in range(5):
    x = 10  # starting segment length; grows as the spiral unwinds
    c = random.randint(0,len(colors)-1)
    turtle1.color(colors[c])
    x_pos = random.randint(-300,300)
    y_pos = random.randint(-300,300)
    # Lift the pen while repositioning so no stray line is drawn.
    turtle1.pu()
    turtle1.setpos(x_pos,y_pos)
    turtle1.pd()
    for d in range(30):
        # 137.5 degrees is the golden angle, giving a phyllotaxis-like spiral.
        turtle1.left(137.5)
        turtle1.forward(float(x))
        x = x + 5

# Keep the window open until the user clicks it.
wn.exitonclick()
|
Change colors because of contrast problems.
|
Change colors because of contrast problems.
|
Python
|
mit
|
erichmatt/turtle
|
---
+++
@@ -6,7 +6,7 @@
wn = turtle.Screen()
wn.bgcolor("lightgreen")
-colors = ("blue","red","green","pink","gray")
+colors = ("blue","red","green","yellow","gray")
for s in range(5):
x = 10
c = random.randint(0,len(colors)-1)
|
31fc3b8c9fd2f0c7f10e14078fca2e7cd43ac4b1
|
lifelines/tests/__main__.py
|
lifelines/tests/__main__.py
|
import pytest
if __name__ == '__main__':
pytest.main("--pyargs lifelines.tests")
|
import sys
import pytest

if __name__ == '__main__':
    # Exit with correct code: pytest.main() returns a non-zero status on
    # failure, and propagating it via sys.exit lets CI detect broken tests.
    sys.exit(pytest.main("--pyargs lifelines.tests"))
|
Exit with correct error code after tests
|
Exit with correct error code after tests
Signed-off-by: Jonas Kalderstam <35a2c6fae61f8077aab61faa4019722abf05093c@kalderstam.se>
|
Python
|
mit
|
wavelets/lifelines,jstoxrocky/lifelines,nerdless/lifelines,CamDavidsonPilon/lifelines
|
---
+++
@@ -1,5 +1,7 @@
+import sys
import pytest
if __name__ == '__main__':
- pytest.main("--pyargs lifelines.tests")
+ # Exit with correct code
+ sys.exit(pytest.main("--pyargs lifelines.tests"))
|
98516d4fa4a7f56e5133377b228ac8c90aed74c9
|
magnum/tests/fake_policy.py
|
magnum/tests/fake_policy.py
|
# Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
policy_data = """
{
"context_is_admin": "role:admin",
"admin_or_owner": "is_admin:True or project_id:%(project_id)s",
"default": "rule:admin_or_owner",
"admin_api": "rule:context_is_admin",
"bay:create": "",
"bay:delete": "",
"bay:detail": "",
"bay:get": "",
"bay:get_all": "",
"bay:update": "",
"baymodel:create": "",
"baymodel:delete": "",
"baymodel:detail": "",
"baymodel:get": "",
"baymodel:get_all": "",
"baymodel:update": "",
"rc:create": "",
"rc:delete": "",
"rc:detail": "",
"rc:get": "",
"rc:get_all": "",
"rc:update": ""
}
"""
|
# Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
policy_data = """
{
"context_is_admin": "role:admin",
"admin_or_owner": "is_admin:True or project_id:%(project_id)s",
"default": "rule:admin_or_owner",
"admin_api": "rule:context_is_admin",
"bay:create": "",
"bay:delete": "",
"bay:detail": "",
"bay:get": "",
"bay:get_all": "",
"bay:update": "",
"baymodel:create": "",
"baymodel:delete": "",
"baymodel:detail": "",
"baymodel:get": "",
"baymodel:get_all": "",
"baymodel:update": ""
}
"""
|
Clean rc from unit tests
|
Clean rc from unit tests
Magnum have removed the k8s rc apis, but have not removed it from
policy.json. The patch (https://review.openstack.org/#/c/384064/)
remove rc from etc/magnum/policy.json.
And we should remove rc from tests/fake_policy.py.
Change-Id: Ia98e1637f2e3a5919be3784322a55005970d4da8
|
Python
|
apache-2.0
|
openstack/magnum,ArchiFleKs/magnum,ArchiFleKs/magnum,openstack/magnum
|
---
+++
@@ -32,13 +32,6 @@
"baymodel:detail": "",
"baymodel:get": "",
"baymodel:get_all": "",
- "baymodel:update": "",
-
- "rc:create": "",
- "rc:delete": "",
- "rc:detail": "",
- "rc:get": "",
- "rc:get_all": "",
- "rc:update": ""
+ "baymodel:update": ""
}
"""
|
08652630865a706126ac61420edb55298296d2eb
|
abilian/services/__init__.py
|
abilian/services/__init__.py
|
"""
Modules that provide services. They are implemented as
Flask extensions (see: http://flask.pocoo.org/docs/extensiondev/ )
"""
__all__ = ['Service', 'ServiceState',
'audit_service', 'index_service', 'activity_service', 'auth_service']
from .base import Service, ServiceState
# Homegrown extensions.
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
from .activity import ActivityService
activity_service = ActivityService()
from .auth import AuthService
auth_service = AuthService()
|
"""
Modules that provide services. They are implemented as
Flask extensions (see: http://flask.pocoo.org/docs/extensiondev/ )
"""
from flask import current_app
from .base import Service, ServiceState
# Homegrown extensions.
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
from .activity import ActivityService
from .auth import AuthService
__all__ = ['Service', 'ServiceState', 'get_service',
'audit_service', 'index_service', 'activity_service', 'auth_service']
auth_service = AuthService()
activity_service = ActivityService()
def get_service(service):
return current_app.services.get(service)
|
Add a get_service convenience method.
|
Add a get_service convenience method.
|
Python
|
lgpl-2.1
|
abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core,abilian/abilian-core
|
---
+++
@@ -4,8 +4,7 @@
"""
-__all__ = ['Service', 'ServiceState',
- 'audit_service', 'index_service', 'activity_service', 'auth_service']
+from flask import current_app
from .base import Service, ServiceState
@@ -13,9 +12,15 @@
from .audit import audit_service
from .indexing import service as index_service
from .conversion import converter
+from .activity import ActivityService
+from .auth import AuthService
-from .activity import ActivityService
+__all__ = ['Service', 'ServiceState', 'get_service',
+ 'audit_service', 'index_service', 'activity_service', 'auth_service']
+
+auth_service = AuthService()
activity_service = ActivityService()
-from .auth import AuthService
-auth_service = AuthService()
+
+def get_service(service):
+ return current_app.services.get(service)
|
0b9d3dbfca2fa54444e8d95b1c63ecf3e726ee5b
|
sso/middleware.py
|
sso/middleware.py
|
class IGBMiddleware(object):
    """
    Middleware to detect the EVE IGB (In-Game Browser).

    Annotates every request with two flags:
      * ``request.is_igb``         -- True when the EVE-Trusted header is present.
      * ``request.is_igb_trusted`` -- True only when that header equals 'Yes'.
    """
    def process_request(self, request):
        request.is_igb = False
        request.is_igb_trusted = False

        # dict.has_key() is Python 2 only; this codebase predates Python 3.
        if request.META.has_key('HTTP_EVE_TRUSTED'):
            request.is_igb = True
            # The IGB sends 'Yes' only when the user has trusted this site.
            if request.META.get('HTTP_EVE_TRUSTED') == 'Yes':
                request.is_igb_trusted = True
|
class IGBMiddleware(object):
    """
    Middleware to detect the EVE IGB (In-Game Browser).

    Sets ``request.is_igb`` / ``request.is_igb_trusted`` flags and copies
    any EVE identification headers present in ``request.META`` onto the
    request object as ``eve_*`` attributes (e.g. ``request.eve_charname``).
    """
    def process_request(self, request):
        request.is_igb = False
        request.is_igb_trusted = False

        # (META header, request attribute) pairs.
        # BUG FIX: the last three entries were missing the comma between the
        # two strings, so implicit string concatenation produced bare strings
        # instead of 2-tuples and the unpacking loop below raised ValueError.
        # NOTE(review): REGIONNAME maps to 'eve_regionid' -- kept as-is for
        # compatibility, but it looks like a naming slip; confirm with callers.
        header_map = [
            ('HTTP_EVE_SERVERIP', 'eve_server_ip'),
            ('HTTP_EVE_CHARNAME', 'eve_charname'),
            ('HTTP_EVE_CHARID', 'eve_charid'),
            ('HTTP_EVE_CORPNAME', 'eve_corpname'),
            ('HTTP_EVE_CORPID', 'eve_corpid'),
            ('HTTP_EVE_ALLIANCENAME', 'eve_alliancename'),
            ('HTTP_EVE_ALLIANCEID', 'eve_allianceid'),
            ('HTTP_EVE_REGIONNAME', 'eve_regionid'),
            ('HTTP_EVE_CONSTELLATIONNAME', 'eve_constellationname'),
            ('HTTP_EVE_SOLARSYSTEMNAME', 'eve_systemname'),
            ('HTTP_EVE_STATIONNAME', 'eve_stationname'),
            ('HTTP_EVE_STATIONID', 'eve_stationid'),
            ('HTTP_EVE_CORPROLE', 'eve_corprole'),
        ]

        # dict.has_key() is Python 2 only; the ``in`` operator is equivalent
        # and also works under Python 3.
        if 'HTTP_EVE_TRUSTED' in request.META:
            request.is_igb = True
            if request.META.get('HTTP_EVE_TRUSTED') == 'Yes':
                request.is_igb_trusted = True

        # Copy each present, non-empty EVE header onto the request.
        # ('attr' instead of 'map' avoids shadowing the builtin.)
        for header, attr in header_map:
            value = request.META.get(header, None)
            if value:
                setattr(request, attr, value)
|
Expand the IGB Middleware to map headers into the request object
|
Expand the IGB Middleware to map headers into the request object
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
---
+++
@@ -9,11 +9,30 @@
request.is_igb = False
request.is_igb_trusted = False
+ header_map = [
+ ('HTTP_EVE_SERVERIP', 'eve_server_ip'),
+ ('HTTP_EVE_CHARNAME', 'eve_charname'),
+ ('HTTP_EVE_CHARID', 'eve_charid'),
+ ('HTTP_EVE_CORPNAME', 'eve_corpname'),
+ ('HTTP_EVE_CORPID', 'eve_corpid'),
+ ('HTTP_EVE_ALLIANCENAME', 'eve_alliancename'),
+ ('HTTP_EVE_ALLIANCEID', 'eve_allianceid'),
+ ('HTTP_EVE_REGIONNAME', 'eve_regionid'),
+ ('HTTP_EVE_CONSTELLATIONNAME', 'eve_constellationname'),
+ ('HTTP_EVE_SOLARSYSTEMNAME', 'eve_systemname'),
+ ('HTTP_EVE_STATIONNAME,' 'eve_stationname'),
+ ('HTTP_EVE_STATIONID,' 'eve_stationid'),
+ ('HTTP_EVE_CORPROLE,' 'eve_corprole'),
+ ]
+
if request.META.has_key('HTTP_EVE_TRUSTED'):
request.is_igb = True
if request.META.get('HTTP_EVE_TRUSTED') == 'Yes':
request.is_igb_trusted = True
+ for header, map in header_map:
+ if request.META.get(header, None):
+ setattr(request, map, request.META.get(header, None))
|
1e03a6ba2a5277c6e665cde994e0ced8b15192c0
|
migrations/482-generate-featured-image-assets.py
|
migrations/482-generate-featured-image-assets.py
|
from amo.utils import chunked
from mkt.developers.tasks import generate_image_assets
from mkt.webapps.models import Webapp
def run():
"""Generate featured tiles."""
for chunk in chunked(Webapp.objects.all(), 50):
for app in chunk:
generate_image_assets.delay(app, slug='featured_tile')
print u'Generated feature tile for %s' % app
|
from amo.utils import chunked
from mkt.developers.tasks import generate_image_assets
from mkt.webapps.models import Webapp
def run():
    """Generate featured tiles.

    Queues a ``generate_image_assets`` task for every webapp, iterating the
    queryset in batches of 50 to bound memory use.
    """
    for chunk in chunked(Webapp.objects.all(), 50):
        for app in chunk:
            generate_image_assets.delay(app, slug='featured_tile')
            # Log the numeric app id rather than its (possibly non-ASCII)
            # name to avoid unicode encoding errors on the console.
            print u'Generated feature tile for app %d' % app.id
|
Fix migration to avoid unicode errors
|
Fix migration to avoid unicode errors
If the app has a name that's not ASCII, you'd get an encoding error.
|
Python
|
bsd-3-clause
|
Joergen/olympia,aviarypl/mozilla-l10n-addons-server,mstriemer/olympia,lavish205/olympia,wagnerand/olympia,robhudson/zamboni,Nolski/olympia,andymckay/zamboni,kmaglione/olympia,mdaif/olympia,eviljeff/zamboni,eviljeff/zamboni,tsl143/zamboni,andymckay/addons-server,jamesthechamp/zamboni,harikishen/addons-server,yfdyh000/olympia,kumar303/zamboni,wagnerand/addons-server,mrrrgn/olympia,shahbaz17/zamboni,mstriemer/zamboni,ayushagrawal288/zamboni,wagnerand/zamboni,harikishen/addons-server,johancz/olympia,mdaif/olympia,crdoconnor/olympia,elysium001/zamboni,eviljeff/olympia,spasovski/zamboni,Hitechverma/zamboni,beni55/olympia,andymckay/olympia,kumar303/olympia,mstriemer/zamboni,Witia1/olympia,Revanth47/addons-server,andymckay/zamboni,clouserw/zamboni,Prashant-Surya/addons-server,aviarypl/mozilla-l10n-addons-server,diox/olympia,lavish205/olympia,Witia1/olympia,ngokevin/zamboni,beni55/olympia,ingenioustechie/zamboni,eviljeff/olympia,SuriyaaKudoIsc/olympia,magopian/olympia,mozilla/olympia,Witia1/olympia,ddurst/zamboni,washort/zamboni,SuriyaaKudoIsc/olympia,diox/zamboni,aviarypl/mozilla-l10n-addons-server,jasonthomas/zamboni,harry-7/addons-server,Hitechverma/zamboni,mozilla/olympia,muffinresearch/addons-server,eviljeff/zamboni,Jobava/zamboni,muffinresearch/olympia,harry-7/addons-server,kmaglione/olympia,psiinon/addons-server,psiinon/addons-server,diox/zamboni,muffinresearch/addons-server,Witia1/olympia,muffinresearch/olympia,jpetto/olympia,mrrrgn/olympia,muffinresearch/olympia,kumar303/zamboni,tsl143/addons-server,wagnerand/addons-server,eviljeff/olympia,johancz/olympia,atiqueahmedziad/addons-server,washort/zamboni,kmaglione/olympia,luckylavish/zamboni,tsl143/addons-server,ngokevin/zamboni,kmaglione/olympia,eviljeff/zamboni,jpetto/olympia,shahbaz17/zamboni,ddurst/zamboni,mudithkr/zamboni,spasovski/zamboni,koehlermichael/olympia,ngokevin/zamboni,mstriemer/addons-server,Joergen/zamboni,robhudson/zamboni,mdaif/olympia,bqbn/addons-server,kmaglione/olympia,Hitechverma/zamboni,Jobava/za
mboni,jpetto/olympia,crdoconnor/olympia,mstriemer/addons-server,crdoconnor/olympia,eviljeff/olympia,tsl143/zamboni,mozilla/olympia,Joergen/olympia,bqbn/addons-server,muffinresearch/olympia,luckylavish/zamboni,kumar303/addons-server,kumar303/zamboni,beni55/olympia,elysium001/zamboni,andymckay/zamboni,jamesthechamp/zamboni,Joergen/zamboni,bqbn/addons-server,mrrrgn/olympia,lavish205/olympia,harry-7/addons-server,yfdyh000/olympia,mstriemer/zamboni,wagnerand/olympia,mstriemer/olympia,SuriyaaKudoIsc/olympia,Hitechverma/zamboni,psiinon/addons-server,magopian/olympia,andymckay/olympia,Witia1/olympia,mdaif/olympia,johancz/olympia,clouserw/zamboni,kumar303/olympia,magopian/olympia,Joergen/zamboni,diox/olympia,beni55/olympia,mozilla/addons-server,washort/zamboni,beni55/olympia,clouserw/zamboni,mudithkr/zamboni,andymckay/olympia,wagnerand/addons-server,johancz/olympia,Nolski/olympia,shahbaz17/zamboni,elysium001/zamboni,ingenioustechie/zamboni,yfdyh000/olympia,mrrrgn/olympia,jpetto/olympia,wagnerand/zamboni,kumar303/olympia,koehlermichael/olympia,andymckay/olympia,muffinresearch/addons-server,mozilla/addons-server,magopian/olympia,Revanth47/addons-server,yfdyh000/olympia,Joergen/zamboni,kumar303/addons-server,atiqueahmedziad/addons-server,mrrrgn/olympia,psiinon/addons-server,luckylavish/zamboni,lavish205/olympia,kumar303/addons-server,johancz/olympia,mozilla/olympia,jasonthomas/zamboni,mudithkr/zamboni,yfdyh000/olympia,mstriemer/addons-server,muffinresearch/addons-server,crdoconnor/olympia,jamesthechamp/zamboni,koehlermichael/olympia,ingenioustechie/zamboni,jasonthomas/zamboni,luckylavish/zamboni,mudithkr/zamboni,harikishen/addons-server,elysium001/zamboni,Revanth47/addons-server,wagnerand/addons-server,mstriemer/zamboni,koehlermichael/olympia,ddurst/zamboni,washort/zamboni,Jobava/zamboni,Prashant-Surya/addons-server,anaran/olympia,mozilla/zamboni,Joergen/zamboni,tsl143/zamboni,mozilla/zamboni,clouserw/zamboni,Joergen/olympia,wagnerand/olympia,jamesthechamp/zamboni,Joergen/olymp
ia,kumar303/addons-server,Revanth47/addons-server,kumar303/olympia,jasonthomas/zamboni,diox/olympia,mozilla/addons-server,atiqueahmedziad/addons-server,kumar303/zamboni,mozilla/zamboni,diox/zamboni,Joergen/olympia,mstriemer/olympia,wagnerand/zamboni,anaran/olympia,SuriyaaKudoIsc/olympia,wagnerand/zamboni,tsl143/addons-server,harikishen/addons-server,bqbn/addons-server,ingenioustechie/zamboni,mozilla/zamboni,mstriemer/addons-server,diox/zamboni,magopian/olympia,ddurst/zamboni,muffinresearch/olympia,anaran/olympia,anaran/olympia,tsl143/zamboni,andymckay/addons-server,aviarypl/mozilla-l10n-addons-server,robhudson/zamboni,ayushagrawal288/zamboni,Nolski/olympia,mstriemer/olympia,mdaif/olympia,tsl143/addons-server,mozilla/addons-server,Prashant-Surya/addons-server,andymckay/addons-server,Jobava/zamboni,spasovski/zamboni,andymckay/addons-server,spasovski/zamboni,ayushagrawal288/zamboni,crdoconnor/olympia,muffinresearch/addons-server,harry-7/addons-server,Nolski/olympia,robhudson/zamboni,wagnerand/olympia,shahbaz17/zamboni,ayushagrawal288/zamboni,koehlermichael/olympia,Nolski/olympia,diox/olympia,atiqueahmedziad/addons-server,Prashant-Surya/addons-server,Joergen/zamboni
|
---
+++
@@ -9,4 +9,4 @@
for chunk in chunked(Webapp.objects.all(), 50):
for app in chunk:
generate_image_assets.delay(app, slug='featured_tile')
- print u'Generated feature tile for %s' % app
+ print u'Generated feature tile for app %d' % app.id
|
c716124b8ede9678ca24eb07f1aa83c1fba9f177
|
doorman/celery_serializer.py
|
doorman/celery_serializer.py
|
# -*- coding: utf-8 -*-
from datetime import datetime
from time import mktime
import json
class DJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime):
return {
'__type__': '__datetime__',
'epoch': int(mktime(obj.timetuple()))
}
else:
return json.JSONEncoder.default(self, obj)
def djson_decoder(obj):
if '__type__' in obj:
if obj['__type__'] == '__datetime__':
return datetime.fromtimestamp(obj['epoch'])
return obj
# Encoder function
def djson_dumps(obj):
return json.dumps(obj, cls=DJSONEncoder)
# Decoder function
def djson_loads(obj):
return json.loads(obj, object_hook=djson_decoder)
|
# -*- coding: utf-8 -*-
from datetime import datetime
from time import mktime
import json
from doorman.compat import string_types
class DJSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes ``datetime`` objects as tagged dicts.

    A datetime is emitted as ``{'__type__': '__datetime__', 'epoch': <int>}``
    so that ``djson_decoder`` can round-trip it back into a datetime.
    """

    def default(self, obj):
        # Non-datetime values fall through to the stock encoder, which
        # raises TypeError for genuinely unsupported types.
        if not isinstance(obj, datetime):
            return json.JSONEncoder.default(self, obj)
        epoch_seconds = int(mktime(obj.timetuple()))
        return {'__type__': '__datetime__', 'epoch': epoch_seconds}
def djson_decoder(obj):
    """Object hook that rebuilds datetimes encoded by ``DJSONEncoder``.

    Dicts tagged with ``__type__ == '__datetime__'`` are converted back to
    ``datetime`` from their stored epoch; everything else passes through
    untouched.
    """
    is_tagged_datetime = ('__type__' in obj
                          and obj['__type__'] == '__datetime__')
    if is_tagged_datetime:
        return datetime.fromtimestamp(obj['epoch'])
    return obj
# Encoder function
def djson_dumps(obj):
    """Serialize *obj* to a JSON string, tagging datetimes via DJSONEncoder."""
    return json.dumps(obj, cls=DJSONEncoder)
# Decoder function
def djson_loads(s):
    """Deserialize a JSON string (or UTF-8 bytes) produced by djson_dumps.

    On Python 3 the payload may arrive as bytes, which json.loads rejects,
    so decode it first; tagged datetimes are restored via djson_decoder.
    """
    if not isinstance(s, string_types):
        s = s.decode('utf-8')
    return json.loads(s, object_hook=djson_decoder)
|
Fix custom decoder on Python 3
|
Fix custom decoder on Python 3
|
Python
|
mit
|
mwielgoszewski/doorman,mwielgoszewski/doorman,mwielgoszewski/doorman,mwielgoszewski/doorman
|
---
+++
@@ -2,6 +2,8 @@
from datetime import datetime
from time import mktime
import json
+
+from doorman.compat import string_types
class DJSONEncoder(json.JSONEncoder):
@@ -28,5 +30,7 @@
# Decoder function
-def djson_loads(obj):
- return json.loads(obj, object_hook=djson_decoder)
+def djson_loads(s):
+ if not isinstance(s, string_types):
+ s = s.decode('utf-8')
+ return json.loads(s, object_hook=djson_decoder)
|
70b23dbd315b93213fc62540aab6293665b9dd0c
|
pebble_tool/sdk/__init__.py
|
pebble_tool/sdk/__init__.py
|
from __future__ import absolute_import
__author__ = 'katharine'
import os
import subprocess
from pebble_tool.exceptions import MissingSDK
from pebble_tool.util import get_persist_dir
def sdk_path():
path = os.getenv('PEBBLE_SDK_PATH', None) or os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
if not os.path.exists(path):
raise MissingSDK("SDK unavailable; can't run this command.")
return path
def sdk_version():
try:
from . import version
return version.version_string
except ImportError:
try:
return subprocess.check_output(["git", "describe"], stderr=subprocess.STDOUT).strip()
except subprocess.CalledProcessError as e:
if e.returncode == 128:
return 'g{}'.format(subprocess.check_output(["git", "rev-parse", "--short", "HEAD"],
stderr=subprocess.STDOUT)).strip()
else:
return 'unknown'
def get_sdk_persist_dir(platform):
dir = os.path.join(get_persist_dir(), sdk_version(), platform)
if not os.path.exists(dir):
os.makedirs(dir)
return dir
def add_arm_tools_to_path(self, args):
os.environ['PATH'] += ":{}".format(os.path.join(self.sdk_path(args), "arm-cs-tools", "bin"))
|
from __future__ import absolute_import
__author__ = 'katharine'
import os
import subprocess
from pebble_tool.exceptions import MissingSDK
from pebble_tool.util import get_persist_dir
def sdk_path():
path = os.getenv('PEBBLE_SDK_PATH', None) or os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
if not os.path.exists(path):
raise MissingSDK("SDK unavailable; can't run this command.")
return path
def sdk_version():
    """Return the SDK version string.

    Prefers the generated ``version`` module (present in packaged builds);
    otherwise falls back to ``git describe`` run in this package's
    directory, then to a short commit hash prefixed with 'g', and finally
    to the literal 'unknown'.
    """
    try:
        from . import version
        return version.version_string
    except ImportError:
        # Run git in this file's directory, not the caller's cwd, so the
        # reported version reflects this checkout regardless of where the
        # tool was invoked from.
        here = os.path.dirname(__file__)
        try:
            return subprocess.check_output(["git", "describe"], cwd=here, stderr=subprocess.STDOUT).strip()
        except subprocess.CalledProcessError as e:
            if e.returncode == 128:
                # 128: describe failed (e.g. no reachable annotated tag);
                # fall back to the bare short hash.
                try:
                    return 'g{}'.format(subprocess.check_output(["git", "rev-parse", "--short", "HEAD"], cwd=here,
                                                                stderr=subprocess.STDOUT)).strip()
                except subprocess.CalledProcessError as e:
                    pass
            return 'unknown'
def get_sdk_persist_dir(platform):
dir = os.path.join(get_persist_dir(), sdk_version(), platform)
if not os.path.exists(dir):
os.makedirs(dir)
return dir
def add_arm_tools_to_path(self, args):
os.environ['PATH'] += ":{}".format(os.path.join(self.sdk_path(args), "arm-cs-tools", "bin"))
|
Fix error determining current version in other working directories.
|
Fix error determining current version in other working directories.
|
Python
|
mit
|
pebble/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,gregoiresage/pebble-tool,pebble/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,gregoiresage/pebble-tool
|
---
+++
@@ -20,14 +20,17 @@
from . import version
return version.version_string
except ImportError:
+ here = os.path.dirname(__file__)
try:
- return subprocess.check_output(["git", "describe"], stderr=subprocess.STDOUT).strip()
+ return subprocess.check_output(["git", "describe"], cwd=here, stderr=subprocess.STDOUT).strip()
except subprocess.CalledProcessError as e:
if e.returncode == 128:
- return 'g{}'.format(subprocess.check_output(["git", "rev-parse", "--short", "HEAD"],
- stderr=subprocess.STDOUT)).strip()
- else:
- return 'unknown'
+ try:
+ return 'g{}'.format(subprocess.check_output(["git", "rev-parse", "--short", "HEAD"], cwd=here,
+ stderr=subprocess.STDOUT)).strip()
+ except subprocess.CalledProcessError as e:
+ pass
+ return 'unknown'
def get_sdk_persist_dir(platform):
dir = os.path.join(get_persist_dir(), sdk_version(), platform)
|
79cbfc35ecca75434cf31839416e5866bad7909d
|
app/__init__.py
|
app/__init__.py
|
import logging
logging.basicConfig(
format='%(asctime)s %(levelname)-8s %(message)s',
level=logging.INFO,
datefmt='%Y-%m-%d %H:%M:%S')
|
import logging
logging.root.handlers = []
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
filename='triggear.log')
console = logging.StreamHandler()
console.setLevel(logging.WARNING)
logging.getLogger("").addHandler(console)
|
Add logging to file on side of logging to stdout
|
Add logging to file on side of logging to stdout
|
Python
|
mit
|
futuresimple/triggear
|
---
+++
@@ -1,6 +1,11 @@
import logging
-logging.basicConfig(
- format='%(asctime)s %(levelname)-8s %(message)s',
- level=logging.INFO,
- datefmt='%Y-%m-%d %H:%M:%S')
+logging.root.handlers = []
+logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s',
+ datefmt='%Y-%m-%d %H:%M:%S',
+ level=logging.INFO,
+ filename='triggear.log')
+
+console = logging.StreamHandler()
+console.setLevel(logging.WARNING)
+logging.getLogger("").addHandler(console)
|
4ec3bd4e4d45c0dba9b8333b52ecd8e8a4a6796f
|
ghpythonremote/ghcompservice.py
|
ghpythonremote/ghcompservice.py
|
import sys
import rpyc
from rpyc.utils.server import OneShotServer
class GhcompService(rpyc.ClassicService):
def on_connect(self, conn):
print('Incoming connection.')
super(GhcompService, self).on_connect(conn)
import ghpythonlib.components as ghcomp
self.ghcomp = ghcomp
def on_disconnect(self, conn):
print('Disconnected.')
def exposed_get_component(self, component_name, is_cluster_component=False):
component = getattr(self.ghcomp, component_name)
if is_cluster_component:
component = getattr(component, component_name) # TODO: improve ghcomp to get clusters the same way we get compiled components, thus removing the need for a custom getter
return component
if __name__ == '__main__':
import rhinoscriptsyntax as rs
port = rs.GetInteger("Server bind port", 18871, 1023, 65535)
server = OneShotServer(GhcompService, hostname='localhost', port=port, listener_timeout=None)
server.start()
|
import sys
import rpyc
from rpyc.utils.server import OneShotServer
class GhcompService(rpyc.ClassicService):
    """RPyC classic service exposing Grasshopper's ``ghpythonlib.components``
    to a remote client."""

    def on_connect(self, conn):
        print('Incoming connection.')
        super(GhcompService, self).on_connect(conn)
        # Imported lazily: ghpythonlib is only importable inside the
        # Rhino/Grasshopper runtime, not at module-load time elsewhere.
        import ghpythonlib.components as ghcomp
        self.ghcomp = ghcomp

    def on_disconnect(self, conn):
        print('Disconnected.')

    def get_component(self, component_name, is_cluster_component=False):
        """Look up a Grasshopper component by name on ghpythonlib.components.

        Cluster components live one attribute level deeper, under an
        attribute of the same name.
        """
        component = getattr(self.ghcomp, component_name)
        if is_cluster_component:
            component = getattr(component, component_name)  # TODO: improve ghcomp to get clusters the same way we get compiled components, thus removing the need for a custom getter
        return component
if __name__ == '__main__':
import rhinoscriptsyntax as rs
port = rs.GetInteger("Server bind port", 18871, 1023, 65535)
server = OneShotServer(GhcompService, hostname='localhost', port=port, listener_timeout=None)
server.start()
|
Stop using `exposed` prefix for remote attributes, per rpyc v4
|
Stop using `exposed` prefix for remote attributes, per rpyc v4
|
Python
|
mit
|
Digital-Structures/ghpythonremote,pilcru/ghpythonremote
|
---
+++
@@ -13,7 +13,7 @@
def on_disconnect(self, conn):
print('Disconnected.')
- def exposed_get_component(self, component_name, is_cluster_component=False):
+ def get_component(self, component_name, is_cluster_component=False):
component = getattr(self.ghcomp, component_name)
if is_cluster_component:
component = getattr(component, component_name) # TODO: improve ghcomp to get clusters the same way we get compiled components, thus removing the need for a custom getter
|
b9fc50abab64f784c8d6defb74aaaf5bdb5a45a7
|
src/python/main.py
|
src/python/main.py
|
"""Script to act as a template"""
import os
import sys
def main(args):
return os.EX_OK
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
#! /usr/bin/env python2
"""Script to act as a template"""
import os
import sys
def main(_args):
    """Template entry point: ignores its arguments and signals success.

    The parameter is underscore-prefixed until something actually consumes
    the command-line arguments.
    """
    return os.EX_OK
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
Hide variable till something actually uses it
|
Hide variable till something actually uses it
|
Python
|
mit
|
jalanb/jab,jalanb/dotjab,jalanb/dotjab,jalanb/jab
|
---
+++
@@ -1,10 +1,11 @@
+#! /usr/bin/env python2
"""Script to act as a template"""
import os
import sys
-def main(args):
+def main(_args):
return os.EX_OK
|
7bae2375adfbd178ce71cc817d1d44876aa58ab7
|
flaskapp.py
|
flaskapp.py
|
from flask import Flask
app = Flask(__name__)
from pymongo import MongoClient
connection = MongoClient()
@app.route("/")
def hello():
return connection.essa.users.find({}).next()['name']
@app.route('/halla/<username>')
def hi(username):
return "Halla " + username
@app.route('/add/<int:x>/<int:y>/')
def sum(x,y):
return str( x+y)
@app.route('/test')
def test():
return "test"
app.debug = True
if __name__ == "__main__":
app.run("0.0.0.0")
|
# Minimal Flask demo app backed by a local MongoDB instance.
from flask import Flask
app = Flask(__name__)
from pymongo import MongoClient
connection = MongoClient()  # default local MongoDB connection

@app.route("/")
def hello():
    # Returns the 'name' field of the first document in essa.users.
    # NOTE(review): cursor.next() raises StopIteration when the collection
    # is empty -- confirm that is acceptable here.
    return connection.essa.users.find({}).next()['name']

#update master
@app.route('/halla/<username>')
def hi(username):
    # Greet the user named in the URL.
    return "Halla " + username

@app.route('/add/<int:x>/<int:y>/')
def sum(x,y):
    # Add the two integers from the URL. NOTE(review): shadows builtin sum().
    return str( x+y)

@app.route('/test')
def test():
    return "test"

app.debug = True  # auto-reload + in-browser tracebacks; disable in production
if __name__ == "__main__":
    app.run("0.0.0.0")  # listen on all interfaces
|
Update the comments for master
|
Update the comments for master
|
Python
|
apache-2.0
|
raklove/hello-world
|
---
+++
@@ -12,6 +12,7 @@
return connection.essa.users.find({}).next()['name']
+#update master
@app.route('/halla/<username>')
def hi(username):
|
e9df4039849e88433f75869b8b6997f21726e761
|
blog/myblog/views.py
|
blog/myblog/views.py
|
from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, HttpResponse
from myblog.models import Article
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
from django.core.urlresolvers import reverse
def index(request):
latest_articles = Article.objects.filter(pub_date__lte=timezone.now()
).order_by("-pub_date")[:5]
most_rated = Article.objects.order_by("-rating")[:3]
for article in most_rated:
article.text = article.text.split(".")[0]
context = {"latest_articles": latest_articles, "most_rated": most_rated}
return render(request, "myblog/index.html", context)
@csrf_exempt
def show_article(request, article_id):
article = get_object_or_404(Article, pk=article_id)
if request.method == "POST":
if request.POST.get("id"):
article.rating += 1
article.save()
return HttpResponse(request.POST.get("id"))
article.comment = request.POST.get("comment")
article.save()
return HttpResponse(article.comment)
return render(request, "myblog/show_article.html", {"article": article})
def contact(request):
return render(request, "myblog/contact.html", {"contact": contact})
|
from django.shortcuts import render, get_object_or_404, HttpResponseRedirect, HttpResponse
from myblog.models import Article
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
from django.core.urlresolvers import reverse
def index(request):
    """Front page: five newest published articles plus three top-rated teasers."""
    latest_articles = Article.objects.filter(pub_date__lte=timezone.now()
                                             ).order_by("-pub_date")[:5]
    most_rated = Article.objects.order_by("-rating")[:3]
    for article in most_rated:
        # Truncate each top-rated article to its first two sentences for
        # display in the jumbotron (mutates the in-memory copy only).
        # NOTE(review): assumes the text contains at least two '.'-separated
        # sentences -- otherwise current[1] raises IndexError; confirm.
        current = article.text.split(".")
        article.text = current[0] + ". " + current[1]
    context = {"latest_articles": latest_articles, "most_rated": most_rated}
    return render(request, "myblog/index.html", context)
@csrf_exempt
def show_article(request, article_id):
article = get_object_or_404(Article, pk=article_id)
if request.method == "POST":
if request.POST.get("id"):
article.rating += 1
article.save()
return HttpResponse(request.POST.get("id"))
article.comment = request.POST.get("comment")
article.save()
return HttpResponse(article.comment)
return render(request, "myblog/show_article.html", {"article": article})
def contact(request):
return render(request, "myblog/contact.html", {"contact": contact})
|
Add more text in jumbotron
|
Add more text in jumbotron
|
Python
|
mit
|
mileto94/Django-tutorial,mileto94/Django-tutorial
|
---
+++
@@ -10,7 +10,8 @@
).order_by("-pub_date")[:5]
most_rated = Article.objects.order_by("-rating")[:3]
for article in most_rated:
- article.text = article.text.split(".")[0]
+ current = article.text.split(".")
+ article.text = current[0] + ". " + current[1]
context = {"latest_articles": latest_articles, "most_rated": most_rated}
return render(request, "myblog/index.html", context)
|
da0ba836636f6ce78d8606a5932de34a8e1160a1
|
registration/__init__.py
|
registration/__init__.py
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# TODO: When Python 2.7 is released this becomes a try/except falling
# back to Django's implementation.
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
Add reminder to myself to to importlib fallback.
|
Add reminder to myself to to importlib fallback.
|
Python
|
bsd-3-clause
|
Geffersonvivan/django-registration,tanjunyen/django-registration,rulz/django-registration,allo-/django-registration,furious-luke/django-registration,kinsights/django-registration,yorkedork/django-registration,mick-t/django-registration,torchingloom/django-registration,alawnchen/django-registration,furious-luke/django-registration,rulz/django-registration,torchingloom/django-registration,arpitremarkable/django-registration,percipient/django-registration,erinspace/django-registration,percipient/django-registration,imgmix/django-registration,tanjunyen/django-registration,imgmix/django-registration,timgraham/django-registration,wda-hb/test,sergafts/django-registration,ei-grad/django-registration,nikolas/django-registration,pando85/django-registration,PetrDlouhy/django-registration,maitho/django-registration,erinspace/django-registration,nikolas/django-registration,timgraham/django-registration,kazitanvirahsan/django-registration,wy123123/django-registration,alawnchen/django-registration,pando85/django-registration,PSU-OIT-ARC/django-registration,Geffersonvivan/django-registration,stillmatic/django-registration,matejkloska/django-registration,arpitremarkable/django-registration,ei-grad/django-registration,memnonila/django-registration,wda-hb/test,yorkedork/django-registration,sergafts/django-registration,PetrDlouhy/django-registration,kazitanvirahsan/django-registration,PSU-OIT-ARC/django-registration,memnonila/django-registration,wy123123/django-registration,maitho/django-registration,allo-/django-registration,kinsights/django-registration,mick-t/django-registration,stillmatic/django-registration,matejkloska/django-registration
|
---
+++
@@ -1,5 +1,8 @@
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
+
+# TODO: When Python 2.7 is released this becomes a try/except falling
+# back to Django's implementation.
from django.utils.importlib import import_module
def get_backend():
|
3398ca0dbdb6e76986ddaadf8ee35c637ce13c14
|
csunplugged/general/views.py
|
csunplugged/general/views.py
|
"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template"""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
|
"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template"""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
|
Break pydocstyle to check Travis
|
Break pydocstyle to check Travis
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
---
+++
@@ -19,7 +19,7 @@
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template"""
- template_name = 'general/contact.html'
+ template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
|
47bf4b10624d8bcdbf3906ff69f5430975679c1c
|
demo/mnist_learning_lenet.py
|
demo/mnist_learning_lenet.py
|
"""Training LeNet on MNIST data."""
import athenet
from athenet.data_loader import MNISTDataLoader
network = athenet.models.lenet(trained=False)
network.data_loader = MNISTDataLoader()
config = athenet.TrainConfig()
config.n_epochs = 10
config.batch_size = 300
config.learning_rate = 0.1
config.val_interval = 1
config.val_units = 'epochs'
network.train(config)
print 'Accuracy on test data: {:.2f}%'.format(100*network.test_accuracy())
|
"""Training LeNet on MNIST data."""
import athenet
from athenet.data_loader import MNISTDataLoader
network = athenet.models.lenet(trained=False)
network.data_loader = MNISTDataLoader()
config = athenet.TrainConfig()
config.n_epochs = 10
config.batch_size = 300
config.learning_rate = 0.1
config.val_interval = 1
config.val_interval_units = 'epochs'
network.train(config)
print 'Accuracy on test data: {:.2f}%'.format(100*network.test_accuracy())
|
Fix name of config field
|
Fix name of config field
|
Python
|
bsd-2-clause
|
heurezjusz/Athenet,heurezjusz/Athena
|
---
+++
@@ -13,7 +13,7 @@
config.batch_size = 300
config.learning_rate = 0.1
config.val_interval = 1
-config.val_units = 'epochs'
+config.val_interval_units = 'epochs'
network.train(config)
print 'Accuracy on test data: {:.2f}%'.format(100*network.test_accuracy())
|
7297f61674d37795f3d63ec990f87e9950d68456
|
salt/utils/yamldumper.py
|
salt/utils/yamldumper.py
|
# -*- coding: utf-8 -*-
'''
salt.utils.yamldumper
~~~~~~~~~~~~~~~~~~~~~
'''
# pylint: disable=W0232
# class has no __init__ method
from __future__ import absolute_import
try:
from yaml import CDumper as Dumper
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import Dumper
from yaml import SafeDumper
from salt.utils.odict import OrderedDict
class OrderedDumper(Dumper):
'''
A YAML dumper that represents python OrderedDict as simple YAML map.
'''
class SafeOrderedDumper(SafeDumper):
'''
A YAML safe dumper that represents python OrderedDict as simple YAML map.
'''
def represent_ordereddict(dumper, data):
return dumper.represent_dict(data.items())
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
|
# -*- coding: utf-8 -*-
'''
salt.utils.yamldumper
~~~~~~~~~~~~~~~~~~~~~
'''
# pylint: disable=W0232
# class has no __init__ method
from __future__ import absolute_import
try:
from yaml import CDumper as Dumper
from yaml import CSafeDumper as SafeDumper
except ImportError:
from yaml import Dumper
from yaml import SafeDumper
from salt.utils.odict import OrderedDict
try:
from ioflo.base.odicting import odict
HAS_IOFLO = True
except ImportError:
odict = None
HAS_IOFLO = False
class OrderedDumper(Dumper):
'''
A YAML dumper that represents python OrderedDict as simple YAML map.
'''
class SafeOrderedDumper(SafeDumper):
'''
A YAML safe dumper that represents python OrderedDict as simple YAML map.
'''
def represent_ordereddict(dumper, data):
return dumper.represent_dict(data.items())
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
if HAS_IOFLO:
OrderedDumper.add_representer(odict, represent_ordereddict)
SafeOrderedDumper.add_representer(odict, represent_ordereddict)
|
Check for ioflo-flavored OrderedDicts as well when outputting YAML
|
Check for ioflo-flavored OrderedDicts as well when outputting YAML
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -16,6 +16,13 @@
from yaml import SafeDumper
from salt.utils.odict import OrderedDict
+
+try:
+ from ioflo.base.odicting import odict
+ HAS_IOFLO = True
+except ImportError:
+ odict = None
+ HAS_IOFLO = False
class OrderedDumper(Dumper):
@@ -36,3 +43,7 @@
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
+
+if HAS_IOFLO:
+ OrderedDumper.add_representer(odict, represent_ordereddict)
+ SafeOrderedDumper.add_representer(odict, represent_ordereddict)
|
44f603cd947f63101cf6b7eb8e49b5210cfa4f6f
|
wry/__init__.py
|
wry/__init__.py
|
#!/usr/bin/env python2
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
#!/usr/bin/env python2
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import version
import AMTDevice
import AMTBoot
import AMTPower
import AMTKVM
import AMTOptIn
import AMTRedirection
AMTDevice = AMTDevice.AMTDevice
AMTBoot = AMTBoot.AMTBoot
AMTPower = AMTPower.AMTPower
AMTKVM = AMTKVM.AMTKVM
AMTOptin = AMTOptIn.AMTOptIn
AMTRedirection = AMTRedirection.AMTRedirection
# For backwards compatibility
device = {
'AMTDevice': AMTDevice,
'AMTBoot': AMTBoot,
'AMTPower': AMTPower,
'AMTKVM': AMTKVM,
'AMTOptIn': AMTOptIn,
'AMTRedirection': AMTRedirection,
}
__all__ = [AMTDevice, AMTBoot, AMTPower, AMTKVM, AMTOptIn, AMTRedirection]
|
Add version. Note this will cause the file to be modified in your working copy. This change is gitignored
|
Add version. Note this will cause the file to be modified in your
working copy. This change is gitignored
|
Python
|
apache-2.0
|
ocadotechnology/wry
|
---
+++
@@ -12,6 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.
+import version
import AMTDevice
import AMTBoot
import AMTPower
|
0d9542011b78dca40f0c4c18a932a06d5305f6ef
|
examples/delete_old_files.py
|
examples/delete_old_files.py
|
#!/bin/python
# installation:
# pip install pytz pyuploadcare~=2.1.0
import pytz
from datetime import timedelta, datetime
import time
from pyuploadcare import conf
from pyuploadcare.api_resources import FileList, FilesStorage
MAX_LIFETIME = 30 # days
conf.pub_key = 'demopublickey'
conf.secret = 'demoprivatekey'
dt_cutoff = datetime.now(pytz.utc) - timedelta(days=MAX_LIFETIME)
if __name__ == '__main__':
print 'Selecting files to be deleted...'
uuid_list = [f.uuid for f in FileList(starting_point=dt_cutoff,
ordering='-datetime_uploaded',
stored=True,
request_limit=500)]
print 'Batch delete of selected files'
ts1 = time.time()
fs = FilesStorage(uuid_list)
fs.delete()
ts2 = time.time()
print 'Operation completed in %f seconds' % (ts2 - ts1)
|
#!/bin/python
# installation:
# pip install pytz pyuploadcare~=2.1.0
import pytz
from datetime import timedelta, datetime
import time
from pyuploadcare import conf
from pyuploadcare.api_resources import FileList, FilesStorage
MAX_LIFETIME = 30 # days
conf.pub_key = 'demopublickey'
conf.secret = 'demoprivatekey'
dt_cutoff = datetime.now(pytz.utc) - timedelta(days=MAX_LIFETIME)
if __name__ == '__main__':
print('Selecting files to be deleted...')
uuid_list = [f.uuid for f in FileList(starting_point=dt_cutoff,
ordering='-datetime_uploaded',
stored=True,
request_limit=500)]
print('Batch delete of selected files')
ts1 = time.time()
fs = FilesStorage(uuid_list)
fs.delete()
ts2 = time.time()
print('Operation completed in %f seconds' % (ts2 - ts1))
|
Make the example code compatible with Python3
|
Make the example code compatible with Python3
|
Python
|
mit
|
uploadcare/pyuploadcare
|
---
+++
@@ -20,14 +20,14 @@
if __name__ == '__main__':
- print 'Selecting files to be deleted...'
+ print('Selecting files to be deleted...')
uuid_list = [f.uuid for f in FileList(starting_point=dt_cutoff,
ordering='-datetime_uploaded',
stored=True,
request_limit=500)]
- print 'Batch delete of selected files'
+ print('Batch delete of selected files')
ts1 = time.time()
fs = FilesStorage(uuid_list)
fs.delete()
ts2 = time.time()
- print 'Operation completed in %f seconds' % (ts2 - ts1)
+ print('Operation completed in %f seconds' % (ts2 - ts1))
|
ddb91c20793d8e5e8a01e0302afeaaba76776741
|
setuptools/extern/six.py
|
setuptools/extern/six.py
|
"""
Handle loading six package from system or from the bundled copy
"""
import imp
_SIX_SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
"""
Alternative to `imp.find_module` that can also search in subpackages.
"""
parts = name.split('.')
for part in parts:
if path is not None:
path = [path]
fh, path, descr = imp.find_module(part, path)
return fh, path, descr
def _import_six(search_path=_SIX_SEARCH_PATH):
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
except ImportError:
continue
imp.load_module(__name__, *mod_info)
break
else:
raise ImportError(
"The 'six' module of minimum version {0} is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
"distribution.")
_import_six()
|
"""
Handle loading a package from system or from the bundled copy
"""
import imp
_SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
"""
Alternative to `imp.find_module` that can also search in subpackages.
"""
parts = name.split('.')
for part in parts:
if path is not None:
path = [path]
fh, path, descr = imp.find_module(part, path)
return fh, path, descr
def _import_in_place(search_path=_SEARCH_PATH):
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
except ImportError:
continue
imp.load_module(__name__, *mod_info)
break
else:
raise ImportError(
"The '{name}' package is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
"distribution.".format(name=_SEARCH_PATH[-1]))
_import_in_place()
|
Make the technique even more generic
|
Make the technique even more generic
--HG--
branch : feature/issue-229
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
---
+++
@@ -1,11 +1,11 @@
"""
-Handle loading six package from system or from the bundled copy
+Handle loading a package from system or from the bundled copy
"""
import imp
-_SIX_SEARCH_PATH = ['setuptools._vendor.six', 'six']
+_SEARCH_PATH = ['setuptools._vendor.six', 'six']
def _find_module(name, path=None):
@@ -24,7 +24,7 @@
return fh, path, descr
-def _import_six(search_path=_SIX_SEARCH_PATH):
+def _import_in_place(search_path=_SEARCH_PATH):
for mod_name in search_path:
try:
mod_info = _find_module(mod_name)
@@ -32,15 +32,14 @@
continue
imp.load_module(__name__, *mod_info)
-
break
else:
raise ImportError(
- "The 'six' module of minimum version {0} is required; "
+ "The '{name}' package is required; "
"normally this is bundled with this package so if you get "
"this warning, consult the packager of your "
- "distribution.")
+ "distribution.".format(name=_SEARCH_PATH[-1]))
-_import_six()
+_import_in_place()
|
88ae237911346d74c1c8a51a11dd47b486f3b4c5
|
setuptools/py31compat.py
|
setuptools/py31compat.py
|
try:
# Python >=3.2
from tempfile import TemporaryDirectory
except ImportError:
import shutil
import tempfile
class TemporaryDirectory(object):
"""
Very simple temporary directory context manager.
Will try to delete afterward, but will also ignore OS and similar
errors on deletion.
"""
def __init__(self):
self.name = None # Handle mkdtemp raising an exception
self.name = tempfile.mkdtemp()
def __enter__(self):
return self.name
def __exit__(self, exctype, excvalue, exctrace):
try:
shutil.rmtree(self.name, True)
except OSError: # removal errors are not the only possible
pass
self.name = None
|
__all__ = []
try:
# Python >=3.2
from tempfile import TemporaryDirectory
except ImportError:
import shutil
import tempfile
class TemporaryDirectory(object):
"""
Very simple temporary directory context manager.
Will try to delete afterward, but will also ignore OS and similar
errors on deletion.
"""
def __init__(self):
self.name = None # Handle mkdtemp raising an exception
self.name = tempfile.mkdtemp()
def __enter__(self):
return self.name
def __exit__(self, exctype, excvalue, exctrace):
try:
shutil.rmtree(self.name, True)
except OSError: # removal errors are not the only possible
pass
self.name = None
|
Make it clear this compat module provides no public members
|
Make it clear this compat module provides no public members
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
---
+++
@@ -1,3 +1,5 @@
+__all__ = []
+
try:
# Python >=3.2
from tempfile import TemporaryDirectory
|
3b257526c9f0ed523d51b023fa454bae59ecac5c
|
channels/ya_metro/app.py
|
channels/ya_metro/app.py
|
#encoding:utf-8
from urllib.parse import urlparse
from utils import get_url
t_channel = '@ya_metro'
subreddit = 'Subways'
def send_post(submission, r2t):
what, url, ext = get_url(submission)
title = submission.title
link = submission.shortlink
text = '{}\n{}'.format(title, link)
if what == 'text':
return False
elif what == 'album':
base_url = submission.url
text = '{}\n{}\n\n{}'.format(title, base_url, link)
r2t.send_text(text)
r2t.send_album(url)
return True
elif what == 'other':
domain = urlparse(url).netloc
if domain in ('www.youtube.com', 'youtu.be'):
text = '{}\n{}\n\n{}'.format(title, url, link)
return r2t.send_text(text)
elif submission.score > 10:
text = '{}\n{}\n\n{}'.format(title, url, link)
return r2t.send_text(text)
else:
return False
elif what in ('gif', 'img'):
return r2t.send_gif_img(what, url, ext, text)
else:
return False
|
#encoding:utf-8
from urllib.parse import urlparse
from utils import get_url
t_channel = '@ya_metro'
subreddit = 'Subways'
def send_post(submission, r2t):
what, url, ext = get_url(submission)
title = submission.title
link = submission.shortlink
text = '{}\n{}'.format(title, link)
if what == 'text':
return False
elif what == 'album':
base_url = submission.url
text = '{}\n{}\n\n{}'.format(title, base_url, link)
r2t.send_text(text)
r2t.send_album(url)
return True
elif what == 'other':
domain = urlparse(url).netloc
if domain in ('www.youtube.com', 'youtu.be'):
text = '{}\n{}\n\n{}'.format(title, url, link)
return r2t.send_text(text)
elif submission.score >= 4:
text = '{}\n{}\n\n{}'.format(title, url, link)
return r2t.send_text(text)
else:
return False
elif what in ('gif', 'img'):
return r2t.send_gif_img(what, url, ext, text)
else:
return False
|
Set URL submission score cutoff to 4.
|
Set URL submission score cutoff to 4.
|
Python
|
mit
|
nsiregar/reddit2telegram,nsiregar/reddit2telegram,Fillll/reddit2telegram,Fillll/reddit2telegram
|
---
+++
@@ -28,7 +28,7 @@
if domain in ('www.youtube.com', 'youtu.be'):
text = '{}\n{}\n\n{}'.format(title, url, link)
return r2t.send_text(text)
- elif submission.score > 10:
+ elif submission.score >= 4:
text = '{}\n{}\n\n{}'.format(title, url, link)
return r2t.send_text(text)
else:
|
3849a5a842137a29ce06b5b7e027c8f8efd4e00e
|
shopify/product/admin.py
|
shopify/product/admin.py
|
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
|
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
readonly_fields = ('description', 'product_type', 'product_id')
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
|
Make Shopify product fields readonly
|
Make Shopify product fields readonly
|
Python
|
bsd-3-clause
|
CorbanU/corban-shopify,CorbanU/corban-shopify
|
---
+++
@@ -6,6 +6,7 @@
class ProductAdmin(admin.ModelAdmin):
list_display = ('description', 'product_type', 'account_number')
ordering = ('description',)
+ readonly_fields = ('description', 'product_type', 'product_id')
class Meta:
model = Product
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.