commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
8817481f758eeb0610b8c77fb9dd15dbdc37579b
|
setup.py
|
setup.py
|
from setuptools import setup
exec (open('plotly/version.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='plotly',
version=__version__,
use_2to3=False,
author='Chris P',
author_email='chris@plot.ly',
maintainer='Chris P',
maintainer_email='chris@plot.ly',
url='https://plot.ly/api/python',
description="Python plotting library for collaborative, "
"interactive, publication-quality graphs.",
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering :: Visualization',
],
license='MIT',
packages=['plotly',
'plotly/plotly',
'plotly/plotly/chunked_requests',
'plotly/graph_objs',
'plotly/grid_objs',
'plotly/widgets',
'plotly/matplotlylib',
'plotly/matplotlylib/mplexporter',
'plotly/matplotlylib/mplexporter/renderers'],
package_data={'plotly': ['graph_reference/*.json', 'widgets/*.js']},
install_requires=['requests', 'six', 'pytz'],
extras_require={"PY2.6": ['simplejson', 'ordereddict',
'requests[security]']},
zip_safe=False)
|
from setuptools import setup
from setuptools import setup, find_packages
exec (open('plotly/version.py').read())
def readme():
with open('README.rst') as f:
return f.read()
setup(name='plotly',
version=__version__,
use_2to3=False,
author='Chris P',
author_email='chris@plot.ly',
maintainer='Chris P',
maintainer_email='chris@plot.ly',
url='https://plot.ly/api/python',
description="Python plotting library for collaborative, "
"interactive, publication-quality graphs.",
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Scientific/Engineering :: Visualization',
],
license='MIT',
packages=find_packages(),
package_data={'plotly': ['graph_reference/*.json', 'widgets/*.js']},
install_requires=['requests', 'six', 'pytz'],
extras_require={"PY2.6": ['simplejson', 'ordereddict',
'requests[security]']},
zip_safe=False)
|
Use `find_packages()` like all the cool kids do.
|
Use `find_packages()` like all the cool kids do.
|
Python
|
mit
|
plotly/python-api,plotly/python-api,plotly/python-api,plotly/plotly.py,ee-in/python-api,ee-in/python-api,ee-in/python-api,plotly/plotly.py,plotly/plotly.py
|
---
+++
@@ -1,4 +1,5 @@
from setuptools import setup
+from setuptools import setup, find_packages
exec (open('plotly/version.py').read())
@@ -31,15 +32,7 @@
'Topic :: Scientific/Engineering :: Visualization',
],
license='MIT',
- packages=['plotly',
- 'plotly/plotly',
- 'plotly/plotly/chunked_requests',
- 'plotly/graph_objs',
- 'plotly/grid_objs',
- 'plotly/widgets',
- 'plotly/matplotlylib',
- 'plotly/matplotlylib/mplexporter',
- 'plotly/matplotlylib/mplexporter/renderers'],
+ packages=find_packages(),
package_data={'plotly': ['graph_reference/*.json', 'widgets/*.js']},
install_requires=['requests', 'six', 'pytz'],
extras_require={"PY2.6": ['simplejson', 'ordereddict',
|
3320bd5e5790433e9d45c6ee69d87a3ebef88939
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.1"
setup(name='tgext.socketio',
version=version,
description="SocketIO support for TurboGears through gevent-socketio",
long_description=README,
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='turbogears2.extension, socketio, gevent',
author='Alessandro Molina',
author_email='alessandro.molina@axant.it',
url='http://github.com/amol-/tgext.socketio',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tgext'],
include_package_data=True,
zip_safe=False,
install_requires=[
'gevent',
'gevent-socketio'
],
test_suite='nose.collector',
tests_require=[
'TurboGears2',
'WebTest==1.4.3',
'repoze.who',
'nose',
'coverage',
'mock',
'pastedeploy',
'formencode'
],
entry_points={
'paste.server_runner': [
'socketio = tgext.socketio.server:socketio_server_runner'
]
}
)
|
from setuptools import setup, find_packages
import sys, os
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
except IOError:
README = ''
version = "0.0.1"
setup(name='tgext.socketio',
version=version,
description="SocketIO support for TurboGears through gevent-socketio",
long_description=README,
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='turbogears2.extension, socketio, gevent',
author='Alessandro Molina',
author_email='alessandro.molina@axant.it',
url='http://github.com/amol-/tgext.socketio',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['tgext'],
include_package_data=True,
zip_safe=False,
install_requires=[
'gevent',
'gevent-socketio'
],
test_suite='nose.collector',
tests_require=[
'TurboGears2',
'WebTest==1.4.3',
'repoze.who',
'nose',
'coverage',
'mock',
'pastedeploy',
'formencode',
'anypubsub'
],
entry_points={
'paste.server_runner': [
'socketio = tgext.socketio.server:socketio_server_runner'
]
}
)
|
Add pubsub dependency for tests
|
Add pubsub dependency for tests
|
Python
|
mit
|
amol-/tgext.socketio
|
---
+++
@@ -36,7 +36,8 @@
'coverage',
'mock',
'pastedeploy',
- 'formencode'
+ 'formencode',
+ 'anypubsub'
],
entry_points={
'paste.server_runner': [
|
87ee89bfa404a81d704fe775031599de84686bbe
|
braid/base.py
|
braid/base.py
|
from __future__ import absolute_import
from fabric.api import sudo, task, put
from twisted.python.filepath import FilePath
from braid import pypy, service, authbind, git, package, bazaar, postgres
@task
def bootstrap():
"""
Prepare the machine to be able to correctly install, configure and execute
twisted services.
"""
# Each service specific system user shall be added to the 'service' group
sudo('groupadd -f --system service')
# gcc is needed for 'pip install'
package.install(['gcc', 'python-pip'])
# For trac
package.install(['python-subversion'])
pypy.install()
authbind.install()
git.install()
bazaar.install()
postgres.install()
sshConfig()
def sshConfig():
"""
Install ssh config that allows anyone who can login as root
to login as any service.
"""
configFile = FilePath(__file__).sibling('sshd_config')
put(configFile.path, '/etc/ssh/sshd_config', use_sudo=True)
sudo('chgrp service /root/.ssh/authorized_keys')
sudo('chmod go+X /root /root/.ssh')
sudo('chmod g+r /root/.ssh/authorized_keys')
service.restart('ssh')
|
from __future__ import absolute_import
from fabric.api import sudo, task, put
from twisted.python.filepath import FilePath
from braid import pypy, service, authbind, git, package, bazaar, postgres
@task
def bootstrap():
"""
Prepare the machine to be able to correctly install, configure and execute
twisted services.
"""
# Each service specific system user shall be added to the 'service' group
sudo('groupadd -f --system service')
package.install(['python2.7', 'python2.7-dev'])
# gcc is needed for 'pip install'
package.install(['gcc', 'python-pip'])
# For trac
package.install(['python-subversion', 'enscript'])
pypy.install()
authbind.install()
git.install()
bazaar.install()
postgres.install()
sshConfig()
def sshConfig():
"""
Install ssh config that allows anyone who can login as root
to login as any service.
"""
configFile = FilePath(__file__).sibling('sshd_config')
put(configFile.path, '/etc/ssh/sshd_config', use_sudo=True)
sudo('chgrp service /root/.ssh/authorized_keys')
sudo('chmod go+X /root /root/.ssh')
sudo('chmod g+r /root/.ssh/authorized_keys')
service.restart('ssh')
|
Install some more packages for trac.
|
Install some more packages for trac.
|
Python
|
mit
|
alex/braid,alex/braid
|
---
+++
@@ -17,10 +17,11 @@
# Each service specific system user shall be added to the 'service' group
sudo('groupadd -f --system service')
+ package.install(['python2.7', 'python2.7-dev'])
# gcc is needed for 'pip install'
package.install(['gcc', 'python-pip'])
# For trac
- package.install(['python-subversion'])
+ package.install(['python-subversion', 'enscript'])
pypy.install()
authbind.install()
git.install()
|
590a1684c7c073879d74240685fe5a304afacfdd
|
setup.py
|
setup.py
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="mock-firestore",
version="0.1.2",
author="Matt Dowds",
description="In-memory implementation of Google Cloud Firestore for use in tests",
long_description=long_description,
url="https://github.com/mdowds/mock-firestore",
packages=setuptools.find_packages(),
test_suite='',
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: MIT License",
],
)
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="mock-firestore",
version="0.1.2",
author="Matt Dowds",
description="In-memory implementation of Google Cloud Firestore for use in tests",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/mdowds/mock-firestore",
packages=setuptools.find_packages(),
test_suite='',
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: MIT License",
],
)
|
Set description content type so it renders on PyPI
|
Set description content type so it renders on PyPI
|
Python
|
mit
|
mdowds/python-mock-firestore
|
---
+++
@@ -9,6 +9,7 @@
author="Matt Dowds",
description="In-memory implementation of Google Cloud Firestore for use in tests",
long_description=long_description,
+ long_description_content_type="text/markdown",
url="https://github.com/mdowds/mock-firestore",
packages=setuptools.find_packages(),
test_suite='',
|
b3533959e096f41c4dfad19d98bf43cb13bd070f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding: utf-8
import re
from setuptools import setup, find_packages
setup(
name='dictmixin',
__version__=re.search(
r'__version__\s*=\s*[\'"]([^\'"]*)[\'"]', # It excludes inline comment too
open('dictmixin/__init__.py').read()).group(1),
description='Parsing mixin which converts `data class instance`, `dict object`, and `json string` each other.',
license='MIT',
author='tadashi-aikawa',
author_email='syou.maman@gmail.com',
maintainer='tadashi-aikawa',
maintainer_email='tadashi-aikawa',
url='https://github.com/tadashi-aikawa/dictmixin.git',
keywords='dict json convert parse each other',
packages=find_packages(exclude=['tests*']),
install_requires=[],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
#!/usr/bin/env python
# coding: utf-8
import os
import re
from setuptools import setup, find_packages
def load_required_modules():
with open(os.path.join(os.path.dirname(__file__), "requirements.txt")) as f:
return [line.strip() for line in f.read().strip().split(os.linesep) if line.strip()]
setup(
name='dictmixin',
__version__=re.search(
r'__version__\s*=\s*[\'"]([^\'"]*)[\'"]', # It excludes inline comment too
open('dictmixin/__init__.py').read()).group(1),
description='Parsing mixin which converts `data class instance`, `dict object`, and `json string` each other.',
license='MIT',
author='tadashi-aikawa',
author_email='syou.maman@gmail.com',
maintainer='tadashi-aikawa',
maintainer_email='tadashi-aikawa',
url='https://github.com/tadashi-aikawa/dictmixin.git',
keywords='dict json convert parse each other',
packages=find_packages(exclude=['tests*']),
install_requires=load_required_modules(),
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
Fix bug `No module named yaml`
|
Fix bug `No module named yaml`
|
Python
|
mit
|
tadashi-aikawa/owlmixin
|
---
+++
@@ -1,8 +1,14 @@
#!/usr/bin/env python
# coding: utf-8
+import os
import re
from setuptools import setup, find_packages
+
+
+def load_required_modules():
+ with open(os.path.join(os.path.dirname(__file__), "requirements.txt")) as f:
+ return [line.strip() for line in f.read().strip().split(os.linesep) if line.strip()]
setup(
@@ -19,7 +25,7 @@
url='https://github.com/tadashi-aikawa/dictmixin.git',
keywords='dict json convert parse each other',
packages=find_packages(exclude=['tests*']),
- install_requires=[],
+ install_requires=load_required_modules(),
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
|
ac92bffbbc4b1c1f1fe4f56cd8600b38182e46a8
|
setup.py
|
setup.py
|
import names
from setuptools import setup, find_packages
setup(
name=names.__title__,
version=names.__version__,
author=names.__author__,
url="https://github.com/treyhunner/names",
description="Generate random names",
long_description='\n\n'.join((
open('README.rst').read(),
open('CHANGES.rst').read(),
)),
license=names.__license__,
packages=find_packages(),
package_data={'names': ['dist.*']},
include_package_data=True,
entry_points={
'console_scripts': [
'names = names.main:main',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
],
test_suite='test_names',
)
|
import names
from setuptools import setup, find_packages
setup(
name=names.__title__,
version=names.__version__,
author=names.__author__,
url="https://github.com/treyhunner/names",
description="Generate random names",
long_description='\n\n'.join((
open('README.rst').read(),
open('CHANGES.rst').read(),
open('CONTRIBUTING.rst').read(),
)),
license=names.__license__,
packages=find_packages(),
package_data={'names': ['dist.*']},
include_package_data=True,
entry_points={
'console_scripts': [
'names = names.main:main',
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
],
test_suite='test_names',
)
|
Add CONTRIBUTING file to package description
|
Add CONTRIBUTING file to package description
|
Python
|
mit
|
treyhunner/names,treyhunner/names
|
---
+++
@@ -11,6 +11,7 @@
long_description='\n\n'.join((
open('README.rst').read(),
open('CHANGES.rst').read(),
+ open('CONTRIBUTING.rst').read(),
)),
license=names.__license__,
packages=find_packages(),
|
08f501e3276811de7cedc52213c268cb6f1499cb
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'django',
'django-celery',
'south',
'django-haystack',
]
setup(
name='django-sentry',
version='1.6.6.1',
author='David Cramer',
author_email='dcramer@gmail.com',
url='http://github.com/dcramer/django-sentry',
description = 'Exception Logging to a Database in Django',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'django-paging>=0.2.2',
'django-indexer==0.2.1',
'uuid',
],
dependency_links=[
'https://github.com/disqus/django-haystack/tarball/master#egg=django-haystack',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='sentry.runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'django',
'django-celery',
'south',
'django-haystack',
'whoosh',
]
setup(
name='django-sentry',
version='1.6.6.1',
author='David Cramer',
author_email='dcramer@gmail.com',
url='http://github.com/dcramer/django-sentry',
description = 'Exception Logging to a Database in Django',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'django-paging>=0.2.2',
'django-indexer==0.2.1',
'uuid',
],
dependency_links=[
'https://github.com/disqus/django-haystack/tarball/master#egg=django-haystack',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='sentry.runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
Add whoosh as test dependancy
|
Add whoosh as test dependancy
|
Python
|
bsd-3-clause
|
WoLpH/django-sentry,WoLpH/django-sentry,tbarbugli/sentry_fork,WoLpH/django-sentry,tbarbugli/sentry_fork,tbarbugli/sentry_fork
|
---
+++
@@ -12,6 +12,7 @@
'django-celery',
'south',
'django-haystack',
+ 'whoosh',
]
setup(
|
ab199d2cbaf72bb5a8c01e96582f726fdb5acbf5
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
version = '1.1.0'
install_requires = (
'djangorestframework>=3.0.5,<3.2',
'FeinCMS>=1.9,<1.11',
'django-orderable>=2.0.1,<4',
'feincms-extensions>=0.1.0,<1',
)
setup(
name='feincms-pages-api',
version=version,
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/feincms-pages-api/',
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
)
|
from setuptools import find_packages, setup
version = '1.1.0'
install_requires = (
'djangorestframework>=3.0.5,<3.3',
'FeinCMS>=1.9,<1.11',
'django-orderable>=2.0.1,<4',
'feincms-extensions>=0.1.0,<1',
)
setup(
name='feincms-pages-api',
version=version,
author='Incuna Ltd',
author_email='admin@incuna.com',
url='https://github.com/incuna/feincms-pages-api/',
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
)
|
Make 'pages' compatible with 'djangorestframework' 3.2
|
Make 'pages' compatible with 'djangorestframework' 3.2
|
Python
|
bsd-2-clause
|
incuna/feincms-pages-api
|
---
+++
@@ -5,7 +5,7 @@
install_requires = (
- 'djangorestframework>=3.0.5,<3.2',
+ 'djangorestframework>=3.0.5,<3.3',
'FeinCMS>=1.9,<1.11',
'django-orderable>=2.0.1,<4',
'feincms-extensions>=0.1.0,<1',
|
fd50ce4b22b4f3d948a64ed400340c0fc744de49
|
src/waldur_core/core/migrations/0008_changeemailrequest_uuid.py
|
src/waldur_core/core/migrations/0008_changeemailrequest_uuid.py
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_changeemailrequest'),
]
operations = [
migrations.AddField(
model_name='changeemailrequest', name='uuid', field=models.UUIDField(),
),
]
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_changeemailrequest'),
]
operations = [
migrations.AddField(
model_name='changeemailrequest',
name='uuid',
field=models.UUIDField(null=True),
),
]
|
Allow null values in UUID field.
|
Allow null values in UUID field.
|
Python
|
mit
|
opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind
|
---
+++
@@ -9,6 +9,8 @@
operations = [
migrations.AddField(
- model_name='changeemailrequest', name='uuid', field=models.UUIDField(),
+ model_name='changeemailrequest',
+ name='uuid',
+ field=models.UUIDField(null=True),
),
]
|
a885ebda3774f9d81422a96265bde25f6a93e7bf
|
tasks.py
|
tasks.py
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
|
from invocations import docs
from invocations.testing import test, integration, watch_tests
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
ns = Collection(test, integration, watch_tests, release, docs)
ns.configure({
'tests': {
'package': 'releases',
},
})
|
Use invocations' integration task, also add watch_tests
|
Use invocations' integration task, also add watch_tests
|
Python
|
bsd-2-clause
|
bitprophet/releases
|
---
+++
@@ -1,5 +1,5 @@
from invocations import docs
-from invocations.testing import test
+from invocations.testing import test, integration, watch_tests
from invocations.packaging import release
from invoke import Collection
@@ -7,13 +7,9 @@
from invoke import task
-@task(help={
- 'pty': "Whether to run tests under a pseudo-tty",
+ns = Collection(test, integration, watch_tests, release, docs)
+ns.configure({
+ 'tests': {
+ 'package': 'releases',
+ },
})
-def integration(pty=True):
- """Runs integration tests."""
- cmd = 'inv test -o --tests=integration'
- run(cmd + ('' if pty else ' --no-pty'), pty=pty)
-
-
-ns = Collection(test, integration, release, docs)
|
49bed20629d4b2ef50026700b98694da4c2ce224
|
tasks.py
|
tasks.py
|
# coding=utf-8
"""Useful task commands for development and maintenance."""
from invoke import run, task
@task
def clean():
"""Clean the project directory of unwanted files and directories."""
run('rm -rf gmusicapi_scripts.egg-info')
run('rm -rf .coverage')
run('rm -rf .tox')
run('rm -rf .cache')
run('rm -rf build/')
run('rm -rf dist/')
run('rm -rf site/')
run('find . -name *.pyc -delete')
run('find . -name *.pyo -delete')
run('find . -name __pycache__ -delete -depth')
run('find . -name *~ -delete')
@task(clean)
def build():
"""Build sdist and bdist_wheel distributions."""
run('python setup.py sdist bdist_wheel')
@task(build)
def deploy():
"""Build and upload gmusicapi_scripts distributions."""
upload()
@task
def upload():
"""Upload gmusicapi_scripts distributions using twine."""
run('twine upload dist/*')
|
# coding=utf-8
"""Useful task commands for development and maintenance."""
from invoke import run, task
@task
def clean():
"""Clean the project directory of unwanted files and directories."""
run('rm -rf gmusicapi_scripts.egg-info')
run('rm -rf .coverage')
run('rm -rf .tox')
run('rm -rf .cache')
run('rm -rf build/')
run('rm -rf dist/')
run('rm -rf site/')
run('find . -name *.pyc -delete')
run('find . -name *.pyo -delete')
run('find . -name __pycache__ -delete -depth')
run('find . -name *~ -delete')
@task(clean)
def build():
"""Build sdist and bdist_wheel distributions."""
run('python setup.py sdist bdist_wheel')
@task(build)
def deploy():
"""Build and upload gmusicapi_scripts distributions."""
upload()
@task
def docs(test=False):
""""Build the gmusicapi_scripts docs."""
if test:
run('mkdocs serve')
else:
run('mkdocs gh-deploy --clean')
@task
def upload():
"""Upload gmusicapi_scripts distributions using twine."""
run('twine upload dist/*')
|
Add task for building docs
|
Add task for building docs
|
Python
|
mit
|
thebigmunch/gmusicapi-scripts
|
---
+++
@@ -37,6 +37,16 @@
@task
+def docs(test=False):
+ """"Build the gmusicapi_scripts docs."""
+
+ if test:
+ run('mkdocs serve')
+ else:
+ run('mkdocs gh-deploy --clean')
+
+
+@task
def upload():
"""Upload gmusicapi_scripts distributions using twine."""
|
5641cfde8ddec78a8559705b81b78c3f18c46f8c
|
python/lazperf/__init__.py
|
python/lazperf/__init__.py
|
__version__='1.1.0'
from .pylazperfapi import PyDecompressor as Decompressor
from .pylazperfapi import PyCompressor as Compressor
from .pylazperfapi import PyVLRDecompressor as VLRDecompressor
from .pylazperfapi import PyVLRCompressor as VLRCompressor
from .pylazperfapi import PyRecordSchema as RecordSchema
from .pylazperfapi import PyLazVlr as LazVLR
from .pylazperfapi import buildNumpyDescription
from .pylazperfapi import buildGreyhoundDescription
|
__version__='1.2.1'
from .pylazperfapi import PyDecompressor as Decompressor
from .pylazperfapi import PyCompressor as Compressor
from .pylazperfapi import PyVLRDecompressor as VLRDecompressor
from .pylazperfapi import PyVLRCompressor as VLRCompressor
from .pylazperfapi import PyRecordSchema as RecordSchema
from .pylazperfapi import PyLazVlr as LazVLR
from .pylazperfapi import buildNumpyDescription
from .pylazperfapi import buildGreyhoundDescription
|
Change Python package version to 1.2.1
|
Change Python package version to 1.2.1
|
Python
|
lgpl-2.1
|
verma/laz-perf,verma/laz-perf,verma/laz-perf,abellgithub/laz-perf,hobu/laz-perf,abellgithub/laz-perf,hobu/laz-perf,abellgithub/laz-perf,hobu/laz-perf,verma/laz-perf,hobu/laz-perf,hobu/laz-perf,verma/laz-perf,abellgithub/laz-perf,abellgithub/laz-perf
|
---
+++
@@ -1,4 +1,4 @@
-__version__='1.1.0'
+__version__='1.2.1'
from .pylazperfapi import PyDecompressor as Decompressor
from .pylazperfapi import PyCompressor as Compressor
from .pylazperfapi import PyVLRDecompressor as VLRDecompressor
|
e24f89366a8a58a29d26f58b8f21aba437ec1566
|
tests/integration/runners/test_cache.py
|
tests/integration/runners/test_cache.py
|
# -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
import tests.integration as integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_cache(self):
'''
Store, list, fetch, then flush data
'''
# Store the data
ret = self.run_run_plus(
'cache.store',
bank='test/runner',
key='test_cache',
data='The time has come the walrus said',
)
# Make sure we can see the new key
ret = self.run_run_plus('cache.list', bank='test/runner')
self.assertIn('test_cache', ret['return'])
# Make sure we can see the new data
ret = self.run_run_plus('cache.fetch', bank='test/runner', key='test_cache')
self.assertIn('The time has come the walrus said', ret['return'])
# Make sure we can delete the data
ret = self.run_run_plus('cache.flush', bank='test/runner', key='test_cache')
ret = self.run_run_plus('cache.list', bank='test/runner')
self.assertNotIn('test_cache', ret['return'])
|
# -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
import tests.integration as integration
class ManageTest(integration.ShellCase):
'''
Test the manage runner
'''
def test_cache(self):
'''
Store, list, fetch, then flush data
'''
# Store the data
ret = self.run_run_plus(
'cache.store',
bank='cachetest/runner',
key='test_cache',
data='The time has come the walrus said',
)
# Make sure we can see the new key
ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertIn('test_cache', ret['return'])
# Make sure we can see the new data
ret = self.run_run_plus('cache.fetch', bank='cachetest/runner', key='test_cache')
self.assertIn('The time has come the walrus said', ret['return'])
# Make sure we can delete the data
ret = self.run_run_plus('cache.flush', bank='cachetest/runner', key='test_cache')
ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertNotIn('test_cache', ret['return'])
|
Use a slightly more specific bank name
|
Use a slightly more specific bank name
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -20,17 +20,17 @@
# Store the data
ret = self.run_run_plus(
'cache.store',
- bank='test/runner',
+ bank='cachetest/runner',
key='test_cache',
data='The time has come the walrus said',
)
# Make sure we can see the new key
- ret = self.run_run_plus('cache.list', bank='test/runner')
+ ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertIn('test_cache', ret['return'])
# Make sure we can see the new data
- ret = self.run_run_plus('cache.fetch', bank='test/runner', key='test_cache')
+ ret = self.run_run_plus('cache.fetch', bank='cachetest/runner', key='test_cache')
self.assertIn('The time has come the walrus said', ret['return'])
# Make sure we can delete the data
- ret = self.run_run_plus('cache.flush', bank='test/runner', key='test_cache')
- ret = self.run_run_plus('cache.list', bank='test/runner')
+ ret = self.run_run_plus('cache.flush', bank='cachetest/runner', key='test_cache')
+ ret = self.run_run_plus('cache.list', bank='cachetest/runner')
self.assertNotIn('test_cache', ret['return'])
|
e32be307fd99c0d38b514385e0af2c257a50d50b
|
mining/urls.py
|
mining/urls.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
from .views import ProcessWebSocketHandler
INCLUDE_URLS = [
(r"/process/(?P<slug>[\w-]+).ws", ProcessWebSocketHandler),
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
]
|
Add URL enter in dynamic process
|
Add URL enter in dynamic process
|
Python
|
mit
|
mining/mining,mlgruby/mining,jgabriellima/mining,mlgruby/mining,mlgruby/mining,chrisdamba/mining,seagoat/mining,chrisdamba/mining,AndrzejR/mining,avelino/mining,seagoat/mining,jgabriellima/mining,avelino/mining,mining/mining,AndrzejR/mining
|
---
+++
@@ -1,9 +1,12 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import MainHandler, ProcessHandler, DashboardHandler
+from .views import ProcessWebSocketHandler
INCLUDE_URLS = [
+ (r"/process/(?P<slug>[\w-]+).ws", ProcessWebSocketHandler),
+ (r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/process/(?P<slug>[\w-]+).json", ProcessHandler),
(r"/(?P<slug>[\w-]+)", DashboardHandler),
(r"/", MainHandler),
|
cafe4629666326fefab17784abac281e7fb226c7
|
examples/confluence-get-group-members.py
|
examples/confluence-get-group-members.py
|
# coding: utf8
from atlassian import Confluence
confluence = Confluence(
url='http://localhost:8090',
username='admin',
password='admin')
# this example related get all user from group e.g. group_name
group_name = 'confluence-users'
flag = True
i = 0
limit = 50
result = []
while flag:
response = confluence.get_group_members(group_name=group_name, start=i * limit, limit=limit)
if response:
i += 1
result.append(response)
print(result)
|
# coding: utf8
from atlassian import Confluence
confluence = Confluence(
url='http://localhost:8090',
username='admin',
password='admin')
# this example related get all user from group e.g. group_name
group_name = 'confluence-users'
flag = True
i = 0
limit = 50
result = []
while flag:
response = confluence.get_group_members(group_name=group_name, start=i * limit, limit=limit)
if response and len(response):
i += 1
result.append(response)
else:
flag = False
print(result)
|
Add other side of condition
|
Add other side of condition
|
Python
|
apache-2.0
|
MattAgile/atlassian-python-api,AstroTech/atlassian-python-api,AstroTech/atlassian-python-api
|
---
+++
@@ -13,8 +13,9 @@
result = []
while flag:
response = confluence.get_group_members(group_name=group_name, start=i * limit, limit=limit)
- if response:
+ if response and len(response):
i += 1
result.append(response)
-
+ else:
+ flag = False
print(result)
|
cb5f74d83f1ed5d655823b87d25ff031e9cb4bc8
|
test/test_utils.py
|
test/test_utils.py
|
import unittest
from LinkMeBot.utils import get_text_from_markdown, human_readable_download_number
class TestUtils(unittest.TestCase):
def test_get_text_from_markdown(self):
markdown = '**test** [^this](https://google.com) ~~is~~ _a_ test! https://google.com'
text = 'test this is a test!'
self.assertEqual(get_text_from_markdown(markdown), text)
# make sure quoted text is discarded
markdown = '''test
> this is a test
hello world
'''
text = 'test\n\nhello world'
self.assertEqual(get_text_from_markdown(markdown), text)
def test_human_readable_download_number(self):
self.assertEqual(human_readable_download_number('12'), '12')
self.assertEqual(human_readable_download_number('12000'), '12 Thousand')
self.assertEqual(human_readable_download_number('12000000'), '12 Million')
self.assertEqual(human_readable_download_number('12,000,000 - 15,000,000'), '12 Million')
if __name__ == '__main__':
unittest.main()
|
import unittest
from LinkMeBot.utils import get_text_from_markdown, human_readable_download_number
class TestUtils(unittest.TestCase):
def test_get_text_from_markdown(self):
markdown = '**test** [^this](https://google.com) ~~is~~ _a_ test! https://google.com'
text = 'test this is a test!'
self.assertEqual(get_text_from_markdown(markdown), text)
# make sure quoted text is discarded
markdown = '''test
> this is a test
hello world
'''
text = 'test\n\nhello world'
self.assertEqual(get_text_from_markdown(markdown), text)
def test_human_readable_download_number(self):
self.assertEqual(human_readable_download_number('12'), '12')
self.assertEqual(human_readable_download_number('12000'), '12 thousand')
self.assertEqual(human_readable_download_number('12000000'), '12 million')
self.assertEqual(human_readable_download_number('12,000,000 - 15,000,000'), '12 million')
if __name__ == '__main__':
unittest.main()
|
Fix tests for human readable numbers
|
Fix tests for human readable numbers
|
Python
|
mit
|
crisbal/PlayStoreLinks_Bot
|
---
+++
@@ -19,9 +19,9 @@
def test_human_readable_download_number(self):
self.assertEqual(human_readable_download_number('12'), '12')
- self.assertEqual(human_readable_download_number('12000'), '12 Thousand')
- self.assertEqual(human_readable_download_number('12000000'), '12 Million')
- self.assertEqual(human_readable_download_number('12,000,000 - 15,000,000'), '12 Million')
+ self.assertEqual(human_readable_download_number('12000'), '12 thousand')
+ self.assertEqual(human_readable_download_number('12000000'), '12 million')
+ self.assertEqual(human_readable_download_number('12,000,000 - 15,000,000'), '12 million')
if __name__ == '__main__':
unittest.main()
|
1c61a731bf33ee25e1bc1725455978064c59748b
|
parkings/api/public/parking_area_statistics.py
|
parkings/api/public/parking_area_statistics.py
|
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import Parking, ParkingArea
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
count = Parking.objects.filter(
parking_area=area,
time_end__gte=timezone.now(),
time_start__lte=timezone.now(),
).count()
return self.blur_count(count)
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
|
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import Parking, ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
count = Parking.objects.filter(
parking_area=area,
time_end__gte=timezone.now(),
time_start__lte=timezone.now(),
).count()
return self.blur_count(count)
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
|
Add bbox to parking area statistics view set
|
Add bbox to parking area statistics view set
|
Python
|
mit
|
tuomas777/parkkihubi
|
---
+++
@@ -2,6 +2,8 @@
from rest_framework import serializers, viewsets
from parkings.models import Parking, ParkingArea
+
+from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
@@ -36,3 +38,6 @@
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
+ bbox_filter_field = 'areas'
+ filter_backends = (WGS84InBBoxFilter,)
+ bbox_filter_include_overlapping = True
|
2d811e0e7e7acf952df25c444d926aee7ef0d8fc
|
harvest/urls.py
|
harvest/urls.py
|
from django.conf.urls import patterns, url
from harvest.views import ApproveJobView, CancelJobView
urlpatterns = patterns('',
url(r'^(?P<job_id>\d+)/approve/?$', ApproveJobView.as_view(), name = 'approve_job' ),
url(r'^(?P<job_id>\d+)/cancel/?$', CancelJobView.as_view(), name = 'cancel_job' ),
)
|
from django.conf.urls import patterns, url
from harvest.views import ApproveJobView, CancelJobView
urlpatterns = patterns('',
url(r'^(?P<job_id>\d+)/approve?$', ApproveJobView.as_view(), name = 'approve_job' ),
url(r'^(?P<job_id>\d+)/cancel?$', CancelJobView.as_view(), name = 'cancel_job' ),
)
|
Remove the tail slash from approce and cancel url
|
Remove the tail slash from approce and cancel url
|
Python
|
bsd-3-clause
|
rockychen-dpaw/borgcollector,parksandwildlife/borgcollector,rockychen-dpaw/borgcollector,parksandwildlife/borgcollector,rockychen-dpaw/borgcollector,parksandwildlife/borgcollector
|
---
+++
@@ -2,7 +2,7 @@
from harvest.views import ApproveJobView, CancelJobView
urlpatterns = patterns('',
- url(r'^(?P<job_id>\d+)/approve/?$', ApproveJobView.as_view(), name = 'approve_job' ),
- url(r'^(?P<job_id>\d+)/cancel/?$', CancelJobView.as_view(), name = 'cancel_job' ),
+ url(r'^(?P<job_id>\d+)/approve?$', ApproveJobView.as_view(), name = 'approve_job' ),
+ url(r'^(?P<job_id>\d+)/cancel?$', CancelJobView.as_view(), name = 'cancel_job' ),
)
|
5e3e3bea2c210d2b14873930dac5e98e4b813726
|
api/base/exceptions.py
|
api/base/exceptions.py
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Title removed to avoid clash with node "title" errors
acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response is not None:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in acceptable_members:
errors.append({key: value})
else:
errors.append({'detail': {key: value}})
elif isinstance(message, list):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
# Return 401 instead of 403 during unauthorized requests without having user log in with Basic Auth
if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
response.status_code = 401
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Title removed to avoid clash with node "title" errors
acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response is not None:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in acceptable_members:
errors.append({key: value})
else:
errors.append({'detail': {key: value}})
elif isinstance(message, list):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
# Return 401 instead of 403 during unauthorized requests without having user log in with Basic Auth
if response is not None and response.data['errors'][0].get('detail') == "Authentication credentials were not provided.":
response.status_code = 401
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
Use dictionary get() method to access 'detail'
|
Use dictionary get() method to access 'detail'
|
Python
|
apache-2.0
|
amyshi188/osf.io,haoyuchen1992/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,arpitar/osf.io,saradbowman/osf.io,felliott/osf.io,brandonPurvis/osf.io,SSJohns/osf.io,njantrania/osf.io,sloria/osf.io,mattclark/osf.io,icereval/osf.io,baylee-d/osf.io,crcresearch/osf.io,RomanZWang/osf.io,caneruguz/osf.io,amyshi188/osf.io,aaxelb/osf.io,caseyrollins/osf.io,Ghalko/osf.io,rdhyee/osf.io,petermalcolm/osf.io,sloria/osf.io,zachjanicki/osf.io,icereval/osf.io,asanfilippo7/osf.io,abought/osf.io,chrisseto/osf.io,cslzchen/osf.io,icereval/osf.io,monikagrabowska/osf.io,danielneis/osf.io,rdhyee/osf.io,SSJohns/osf.io,caseyrygt/osf.io,mluo613/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,hmoco/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,danielneis/osf.io,GageGaskins/osf.io,wearpants/osf.io,jnayak1/osf.io,saradbowman/osf.io,rdhyee/osf.io,zamattiac/osf.io,brianjgeiger/osf.io,haoyuchen1992/osf.io,cosenal/osf.io,KAsante95/osf.io,crcresearch/osf.io,TomHeatwole/osf.io,rdhyee/osf.io,emetsger/osf.io,doublebits/osf.io,pattisdr/osf.io,sbt9uc/osf.io,RomanZWang/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,ZobairAlijan/osf.io,hmoco/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,erinspace/osf.io,acshi/osf.io,danielneis/osf.io,samanehsan/osf.io,DanielSBrown/osf.io,wearpants/osf.io,pattisdr/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,chennan47/osf.io,DanielSBrown/osf.io,sloria/osf.io,acshi/osf.io,binoculars/osf.io,KAsante95/osf.io,acshi/osf.io,sbt9uc/osf.io,hmoco/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,arpitar/osf.io,sbt9uc/osf.io,kwierman/osf.io,mluke93/osf.io,Johnetordoff/osf.io,adlius/osf.io,adlius/osf.io,asanfilippo7/osf.io,sbt9uc/osf.io,chrisseto/osf.io,KAsante95/osf.io,Ghalko/osf.io,TomHeatwole/osf.io,caseyrygt/osf.io,alexschiller/osf.io,samanehsan/osf.io,mfraezz/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.i
o,kch8qx/osf.io,mluke93/osf.io,Johnetordoff/osf.io,crcresearch/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,chennan47/osf.io,samanehsan/osf.io,CenterForOpenScience/osf.io,doublebits/osf.io,erinspace/osf.io,mluo613/osf.io,TomBaxter/osf.io,ticklemepierce/osf.io,mluo613/osf.io,TomBaxter/osf.io,asanfilippo7/osf.io,cslzchen/osf.io,mluo613/osf.io,ticklemepierce/osf.io,Nesiehr/osf.io,aaxelb/osf.io,TomBaxter/osf.io,jnayak1/osf.io,binoculars/osf.io,laurenrevere/osf.io,mfraezz/osf.io,cslzchen/osf.io,hmoco/osf.io,KAsante95/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,danielneis/osf.io,baylee-d/osf.io,njantrania/osf.io,emetsger/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,kch8qx/osf.io,amyshi188/osf.io,alexschiller/osf.io,alexschiller/osf.io,mluo613/osf.io,acshi/osf.io,petermalcolm/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,emetsger/osf.io,caseyrollins/osf.io,Ghalko/osf.io,kch8qx/osf.io,SSJohns/osf.io,Nesiehr/osf.io,felliott/osf.io,njantrania/osf.io,chrisseto/osf.io,leb2dg/osf.io,Ghalko/osf.io,caseyrygt/osf.io,jnayak1/osf.io,cosenal/osf.io,ZobairAlijan/osf.io,asanfilippo7/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,kwierman/osf.io,acshi/osf.io,cwisecarver/osf.io,petermalcolm/osf.io,jnayak1/osf.io,billyhunt/osf.io,wearpants/osf.io,doublebits/osf.io,abought/osf.io,HalcyonChimera/osf.io,adlius/osf.io,mluke93/osf.io,zachjanicki/osf.io,ticklemepierce/osf.io,KAsante95/osf.io,mluke93/osf.io,cslzchen/osf.io,cwisecarver/osf.io,felliott/osf.io,zamattiac/osf.io,laurenrevere/osf.io,caneruguz/osf.io,emetsger/osf.io,aaxelb/osf.io,Nesiehr/osf.io,binoculars/osf.io,erinspace/osf.io,arpitar/osf.io,zamattiac/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,laurenrevere/osf.io,zachjanicki/osf.io,ticklemepierce/osf.io,mattclark/osf.io,samchrisinger/osf.io,leb2dg/osf.io,baylee-d/osf.io,abought/osf.io,chennan47/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,cosenal/osf.io,TomHeatwole/osf.io,samchrisinger/osf.io,zachjanicki/osf.io,brianjge
iger/osf.io,GageGaskins/osf.io,DanielSBrown/osf.io,petermalcolm/osf.io,monikagrabowska/osf.io,amyshi188/osf.io,arpitar/osf.io,felliott/osf.io,leb2dg/osf.io,SSJohns/osf.io,billyhunt/osf.io,wearpants/osf.io,doublebits/osf.io,njantrania/osf.io,doublebits/osf.io,abought/osf.io,chrisseto/osf.io,alexschiller/osf.io,mfraezz/osf.io,RomanZWang/osf.io,cosenal/osf.io,kwierman/osf.io,RomanZWang/osf.io,billyhunt/osf.io,kch8qx/osf.io,GageGaskins/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io
|
---
+++
@@ -31,7 +31,7 @@
response.data = {'errors': errors}
# Return 401 instead of 403 during unauthorized requests without having user log in with Basic Auth
- if response is not None and response.data['errors'][0]['detail'] == "Authentication credentials were not provided.":
+ if response is not None and response.data['errors'][0].get('detail') == "Authentication credentials were not provided.":
response.status_code = 401
return response
|
367e025f5710301545bde09daf06cc2e5fe21510
|
nltk/test/unit/test_stem.py
|
nltk/test/unit/test_stem.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import unittest
from nltk.stem.snowball import SnowballStemmer
class SnowballTest(unittest.TestCase):
def test_russian(self):
# Russian words both consisting of Cyrillic
# and Roman letters can be stemmed.
stemmer_russian = SnowballStemmer("russian")
assert stemmer_russian.stem("Π°Π²Π°Π½ΡΠ½Π΅Π½ΡΠΊΠ°Ρ") == "Π°Π²Π°Π½ΡΠ½Π΅Π½ΡΠΊ"
assert stemmer_russian.stem("avenantnen'kai^a") == "avenantnen'k"
def test_german(self):
stemmer_german = SnowballStemmer("german")
stemmer_german2 = SnowballStemmer("german", ignore_stopwords=True)
assert stemmer_german.stem("Schr\xe4nke") == 'schrank'
assert stemmer_german2.stem("Schr\xe4nke") == 'schrank'
assert stemmer_german.stem("keinen") == 'kein'
assert stemmer_german2.stem("keinen") == 'keinen'
def test_short_strings_bug(self):
stemmer = SnowballStemmer('english')
assert stemmer.stem("y's") == 'y'
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import unittest
from nltk.stem.snowball import SnowballStemmer
class SnowballTest(unittest.TestCase):
def test_russian(self):
# Russian words both consisting of Cyrillic
# and Roman letters can be stemmed.
stemmer_russian = SnowballStemmer("russian")
assert stemmer_russian.stem("Π°Π²Π°Π½ΡΠ½Π΅Π½ΡΠΊΠ°Ρ") == "Π°Π²Π°Π½ΡΠ½Π΅Π½ΡΠΊ"
assert stemmer_russian.stem("avenantnen'kai^a") == "avenantnen'k"
def test_german(self):
stemmer_german = SnowballStemmer("german")
stemmer_german2 = SnowballStemmer("german", ignore_stopwords=True)
assert stemmer_german.stem("Schr\xe4nke") == 'schrank'
assert stemmer_german2.stem("Schr\xe4nke") == 'schrank'
assert stemmer_german.stem("keinen") == 'kein'
assert stemmer_german2.stem("keinen") == 'keinen'
def test_spanish(self):
stemmer = SnowballStemmer('spanish')
assert stemmer.stem("Visionado") == 'vision'
def test_short_strings_bug(self):
stemmer = SnowballStemmer('english')
assert stemmer.stem("y's") == 'y'
|
Add stem test for spanish stemmer
|
Add stem test for spanish stemmer
|
Python
|
apache-2.0
|
nltk/nltk,nltk/nltk,nltk/nltk
|
---
+++
@@ -2,6 +2,7 @@
from __future__ import print_function, unicode_literals
import unittest
from nltk.stem.snowball import SnowballStemmer
+
class SnowballTest(unittest.TestCase):
@@ -22,6 +23,11 @@
assert stemmer_german.stem("keinen") == 'kein'
assert stemmer_german2.stem("keinen") == 'keinen'
+ def test_spanish(self):
+ stemmer = SnowballStemmer('spanish')
+
+ assert stemmer.stem("Visionado") == 'vision'
+
def test_short_strings_bug(self):
stemmer = SnowballStemmer('english')
assert stemmer.stem("y's") == 'y'
|
e922f526ce95b8a992004df91b08bf466d9eea15
|
dbmigrator/cli.py
|
dbmigrator/cli.py
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2015, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import argparse
import os
import sys
from . import commands, utils
DEFAULTS = {
'migrations_directory': 'migrations',
}
DEFAULT_CONFIG_PATH = 'development.ini'
def main(argv=sys.argv[1:]):
parser = argparse.ArgumentParser(description='DB Migrator')
parser.add_argument('--migrations-directory')
parser.add_argument('--config', default=DEFAULT_CONFIG_PATH)
parser.add_argument('--db-connection-string',
help='a psycopg2 db connection string')
subparsers = parser.add_subparsers(help='commands')
commands.load_cli(subparsers)
args = parser.parse_args(argv)
args = vars(args)
if os.path.exists(args['config']):
utils.get_settings_from_config(args['config'], [
'migrations-directory',
'db-connection-string',
], args)
utils.get_settings_from_entry_points(args)
for name, value in DEFAULTS.items():
if not args.get(name):
args[name] = value
if 'cmmd' not in args:
parser.print_help()
return parser.error('command missing')
args['migrations_directory'] = os.path.relpath(
args['migrations_directory'])
return args['cmmd'](**args)
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2015, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import argparse
import os
import sys
from . import commands, utils
DEFAULTS = {
}
def main(argv=sys.argv[1:]):
parser = argparse.ArgumentParser(description='DB Migrator')
parser.add_argument('--migrations-directory')
parser.add_argument('--config')
parser.add_argument('--db-connection-string',
help='a psycopg2 db connection string')
subparsers = parser.add_subparsers(help='commands')
commands.load_cli(subparsers)
args = parser.parse_args(argv)
args = vars(args)
if os.path.exists(args['config']):
utils.get_settings_from_config(args['config'], [
'migrations-directory',
'db-connection-string',
], args)
utils.get_settings_from_entry_points(args)
for name, value in DEFAULTS.items():
if not args.get(name):
args[name] = value
if 'cmmd' not in args:
parser.print_help()
return parser.error('command missing')
args['migrations_directory'] = os.path.relpath(
args['migrations_directory'])
return args['cmmd'](**args)
|
Remove default config path (development.ini)
|
Remove default config path (development.ini)
Towards #1
|
Python
|
agpl-3.0
|
karenc/db-migrator
|
---
+++
@@ -14,10 +14,7 @@
DEFAULTS = {
- 'migrations_directory': 'migrations',
}
-
-DEFAULT_CONFIG_PATH = 'development.ini'
def main(argv=sys.argv[1:]):
@@ -25,7 +22,7 @@
parser.add_argument('--migrations-directory')
- parser.add_argument('--config', default=DEFAULT_CONFIG_PATH)
+ parser.add_argument('--config')
parser.add_argument('--db-connection-string',
help='a psycopg2 db connection string')
|
4539ebc92d59dd0388658fa482626185088222b8
|
tests.py
|
tests.py
|
from __future__ import unicode_literals
from tqdm import format_interval, format_meter
def test_format_interval():
assert format_interval(60) == '01:00'
assert format_interval(6160) == '1:42:40'
assert format_interval(238113) == '66:08:33'
def test_format_meter():
assert format_meter(0, 1000, 13) == \
"|----------| 0/1000 0% [elapsed: " \
"00:13 left: ?, 0.00 iters/sec]"
assert format_meter(231, 1000, 392) == \
"|##--------| 231/1000 23% [elapsed: " \
"06:32 left: 21:44, 0.59 iters/sec]"
|
from __future__ import unicode_literals
from StringIO import StringIO
import csv
from tqdm import format_interval, format_meter, tqdm
def test_format_interval():
assert format_interval(60) == '01:00'
assert format_interval(6160) == '1:42:40'
assert format_interval(238113) == '66:08:33'
def test_format_meter():
assert format_meter(0, 1000, 13) == \
"|----------| 0/1000 0% [elapsed: " \
"00:13 left: ?, 0.00 iters/sec]"
assert format_meter(231, 1000, 392) == \
"|##--------| 231/1000 23% [elapsed: " \
"06:32 left: 21:44, 0.59 iters/sec]"
def test_iterate_over_csv_rows():
# Create a test csv pseudo file
test_csv_file = StringIO()
writer = csv.writer(test_csv_file)
for i in range(3):
writer.writerow(['test', 'test', 'test'])
test_csv_file.seek(0)
# Test that nothing fails if we iterate over rows
reader = csv.DictReader(test_csv_file, fieldnames=('row1', 'row2', 'row3'))
for row in tqdm(reader):
pass
|
Test that tqdm fails when iterating over a csv file
|
Test that tqdm fails when iterating over a csv file
|
Python
|
mit
|
lrq3000/tqdm,kmike/tqdm
|
---
+++
@@ -1,5 +1,7 @@
from __future__ import unicode_literals
-from tqdm import format_interval, format_meter
+from StringIO import StringIO
+import csv
+from tqdm import format_interval, format_meter, tqdm
def test_format_interval():
@@ -15,3 +17,17 @@
assert format_meter(231, 1000, 392) == \
"|##--------| 231/1000 23% [elapsed: " \
"06:32 left: 21:44, 0.59 iters/sec]"
+
+
+def test_iterate_over_csv_rows():
+ # Create a test csv pseudo file
+ test_csv_file = StringIO()
+ writer = csv.writer(test_csv_file)
+ for i in range(3):
+ writer.writerow(['test', 'test', 'test'])
+ test_csv_file.seek(0)
+
+ # Test that nothing fails if we iterate over rows
+ reader = csv.DictReader(test_csv_file, fieldnames=('row1', 'row2', 'row3'))
+ for row in tqdm(reader):
+ pass
|
7c5cf5eb9b59a499ab480ea571cbb9d8203c1a79
|
tests/test_gdal.py
|
tests/test_gdal.py
|
""" Test gdal plugin functionality.
"""
import pytest
from imageio.testing import run_tests_if_main, get_test_dir
import imageio
from imageio.core import get_remote_file
test_dir = get_test_dir()
try:
from osgeo import gdal
except ImportError:
gdal = None
@pytest.mark.skipif('gdal is None')
def test_gdal_reading():
""" Test reading gdal"""
filename = get_remote_file('images/geotiff.tif')
im = imageio.imread(filename, 'gdal')
assert im.shape == (929, 699)
R = imageio.read(filename, 'gdal')
assert R.format.name == 'GDAL'
meta_data = R.get_meta_data()
assert 'TIFFTAG_XRESOLUTION' in meta_data
# Fail
raises = pytest.raises
raises(IndexError, R.get_data, -1)
raises(IndexError, R.get_data, 3)
run_tests_if_main()
|
""" Test gdal plugin functionality.
"""
import pytest
from imageio.testing import run_tests_if_main, get_test_dir, need_internet
import imageio
from imageio.core import get_remote_file
test_dir = get_test_dir()
try:
from osgeo import gdal
except ImportError:
gdal = None
@pytest.mark.skipif('gdal is None')
def test_gdal_reading():
""" Test reading gdal"""
need_internet()
filename = get_remote_file('images/geotiff.tif')
im = imageio.imread(filename, 'gdal')
assert im.shape == (929, 699)
R = imageio.read(filename, 'gdal')
assert R.format.name == 'GDAL'
meta_data = R.get_meta_data()
assert 'TIFFTAG_XRESOLUTION' in meta_data
# Fail
raises = pytest.raises
raises(IndexError, R.get_data, -1)
raises(IndexError, R.get_data, 3)
run_tests_if_main()
|
Mark gdal tests as requiring internet
|
Mark gdal tests as requiring internet
|
Python
|
bsd-2-clause
|
imageio/imageio
|
---
+++
@@ -1,7 +1,7 @@
""" Test gdal plugin functionality.
"""
import pytest
-from imageio.testing import run_tests_if_main, get_test_dir
+from imageio.testing import run_tests_if_main, get_test_dir, need_internet
import imageio
from imageio.core import get_remote_file
@@ -18,7 +18,8 @@
@pytest.mark.skipif('gdal is None')
def test_gdal_reading():
""" Test reading gdal"""
-
+ need_internet()
+
filename = get_remote_file('images/geotiff.tif')
im = imageio.imread(filename, 'gdal')
|
54933f992af05ea3ac1edbb11e5873b9327a946e
|
script/lib/config.py
|
script/lib/config.py
|
#!/usr/bin/env python
import platform
import sys
BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'e375124044f9044ac88076eba0cd17361ee0997c'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
verbose_mode = False
def enable_verbose_mode():
print 'Running in verbose mode'
global verbose_mode
verbose_mode = True
def is_verbose_mode():
return verbose_mode
|
#!/usr/bin/env python
# Build configuration shared by the bootstrap/build scripts (Python 2:
# note the print statement below).
import platform
import sys
# Mirror hosting the prebuilt libchromiumcontent binaries.
BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
# Pinned libchromiumcontent revision to download from BASE_URL.
LIBCHROMIUMCONTENT_COMMIT = '55efd338101e08691560192b2be0f9c3b1b0eb72'
# Pointer width of the build host, keyed by sys.platform ('linux2' is the
# Python 2 value on Linux).  NOTE(review): any platform not listed here
# raises KeyError at import time — presumably intentional; confirm.
ARCH = {
    'cygwin': '32bit',
    'darwin': '64bit',
    'linux2': platform.architecture()[0],
    'win32': '32bit',
}[sys.platform]
# Architecture label used in distribution artifact names.
DIST_ARCH = {
    '32bit': 'ia32',
    '64bit': 'x64',
}[ARCH]
# Normalized platform name (cygwin builds target win32).
TARGET_PLATFORM = {
    'cygwin': 'win32',
    'darwin': 'darwin',
    'linux2': 'linux',
    'win32': 'win32',
}[sys.platform]
# Module-level flag toggled by enable_verbose_mode() below.
verbose_mode = False
def enable_verbose_mode():
    # Announce and switch on verbose output for the rest of the run.
    print 'Running in verbose mode'
    global verbose_mode
    verbose_mode = True
def is_verbose_mode():
    # True once enable_verbose_mode() has been called.
    return verbose_mode
|
Upgrade libchromiumcontent to fix icu symbols
|
Upgrade libchromiumcontent to fix icu symbols
|
Python
|
mit
|
JussMee15/electron,mrwizard82d1/electron,chriskdon/electron,seanchas116/electron,the-ress/electron,anko/electron,brenca/electron,kikong/electron,RobertJGabriel/electron,jacksondc/electron,etiktin/electron,arturts/electron,DivyaKMenon/electron,gerhardberger/electron,twolfson/electron,fffej/electron,mirrh/electron,BionicClick/electron,bpasero/electron,gamedevsam/electron,thomsonreuters/electron,Jonekee/electron,jhen0409/electron,greyhwndz/electron,vHanda/electron,leftstick/electron,renaesop/electron,gerhardberger/electron,deepak1556/atom-shell,jcblw/electron,minggo/electron,icattlecoder/electron,mjaniszew/electron,thingsinjars/electron,nicobot/electron,vHanda/electron,carsonmcdonald/electron,jtburke/electron,DivyaKMenon/electron,anko/electron,Zagorakiss/electron,coderhaoxin/electron,rreimann/electron,hokein/atom-shell,stevekinney/electron,mhkeller/electron,RobertJGabriel/electron,rsvip/electron,nagyistoce/electron-atom-shell,xiruibing/electron,SufianHassan/electron,miniak/electron,anko/electron,RobertJGabriel/electron,eric-seekas/electron,jtburke/electron,Jonekee/electron,jhen0409/electron,nicobot/electron,renaesop/electron,shiftkey/electron,setzer777/electron,BionicClick/electron,egoist/electron,iftekeriba/electron,micalan/electron,setzer777/electron,zhakui/electron,SufianHassan/electron,coderhaoxin/electron,farmisen/electron,thomsonreuters/electron,dkfiresky/electron,electron/electron,kostia/electron,gabrielPeart/electron,vaginessa/electron,adamjgray/electron,jaanus/electron,kcrt/electron,felixrieseberg/electron,SufianHassan/electron,simongregory/electron,jacksondc/electron,gabrielPeart/electron,cos2004/electron,aichingm/electron,carsonmcdonald/electron,arturts/electron,jiaz/electron,shockone/electron,bbondy/electron,gstack/infinium-shell,shennushi/electron,gamedevsam/electron,robinvandernoord/electron,twolfson/electron,noikiy/electron,robinvandernoord/electron,trigrass2/electron,gabriel/electron,kokdemo/electron,jannishuebl/electron,Zagorakiss/electron,fffej/electr
on,bright-sparks/electron,joaomoreno/atom-shell,nagyistoce/electron-atom-shell,brave/electron,brave/muon,bpasero/electron,medixdev/electron,faizalpribadi/electron,zhakui/electron,beni55/electron,darwin/electron,Ivshti/electron,setzer777/electron,MaxGraey/electron,joneit/electron,renaesop/electron,howmuchcomputer/electron,vipulroxx/electron,ianscrivener/electron,synaptek/electron,bright-sparks/electron,vaginessa/electron,jaanus/electron,kokdemo/electron,voidbridge/electron,destan/electron,timruffles/electron,DivyaKMenon/electron,gerhardberger/electron,dahal/electron,bpasero/electron,benweissmann/electron,thomsonreuters/electron,mattdesl/electron,leolujuyi/electron,tonyganch/electron,ianscrivener/electron,roadev/electron,mhkeller/electron,michaelchiche/electron,webmechanicx/electron,JesselJohn/electron,simonfork/electron,nekuz0r/electron,arusakov/electron,medixdev/electron,edulan/electron,biblerule/UMCTelnetHub,mjaniszew/electron,arusakov/electron,lzpfmh/electron,shennushi/electron,Rokt33r/electron,fomojola/electron,leftstick/electron,anko/electron,vaginessa/electron,destan/electron,LadyNaggaga/electron,icattlecoder/electron,pirafrank/electron,ankitaggarwal011/electron,mrwizard82d1/electron,voidbridge/electron,timruffles/electron,vaginessa/electron,systembugtj/electron,Andrey-Pavlov/electron,thingsinjars/electron,jonatasfreitasv/electron,yan-foto/electron,mirrh/electron,smczk/electron,beni55/electron,webmechanicx/electron,Gerhut/electron,takashi/electron,brenca/electron,dkfiresky/electron,adamjgray/electron,jcblw/electron,tylergibson/electron,bright-sparks/electron,lrlna/electron,bpasero/electron,kokdemo/electron,nekuz0r/electron,leftstick/electron,shaundunne/electron,greyhwndz/electron,micalan/electron,davazp/electron,digideskio/electron,LadyNaggaga/electron,noikiy/electron,astoilkov/electron,fffej/electron,simonfork/electron,maxogden/atom-shell,ianscrivener/electron,Evercoder/electron,kazupon/electron,vipulroxx/electron,SufianHassan/electron,systembugtj/electron,nat
golov/electron,posix4e/electron,SufianHassan/electron,xiruibing/electron,aichingm/electron,icattlecoder/electron,fritx/electron,gamedevsam/electron,stevekinney/electron,MaxGraey/electron,nagyistoce/electron-atom-shell,christian-bromann/electron,RIAEvangelist/electron,eriser/electron,tincan24/electron,cqqccqc/electron,wolfflow/electron,BionicClick/electron,neutrous/electron,xfstudio/electron,deed02392/electron,mubassirhayat/electron,kazupon/electron,John-Lin/electron,rajatsingla28/electron,mattdesl/electron,SufianHassan/electron,Neron-X5/electron,leethomas/electron,lzpfmh/electron,sky7sea/electron,meowlab/electron,Jacobichou/electron,Faiz7412/electron,biblerule/UMCTelnetHub,howmuchcomputer/electron,neutrous/electron,greyhwndz/electron,the-ress/electron,deepak1556/atom-shell,thompsonemerson/electron,Jonekee/electron,pirafrank/electron,abhishekgahlot/electron,rreimann/electron,pirafrank/electron,nicholasess/electron,oiledCode/electron,darwin/electron,John-Lin/electron,jlhbaseball15/electron,Ivshti/electron,adcentury/electron,ervinb/electron,vaginessa/electron,MaxGraey/electron,tincan24/electron,kcrt/electron,medixdev/electron,pombredanne/electron,cqqccqc/electron,roadev/electron,gerhardberger/electron,LadyNaggaga/electron,yalexx/electron,smczk/electron,jcblw/electron,ankitaggarwal011/electron,ervinb/electron,vHanda/electron,digideskio/electron,nagyistoce/electron-atom-shell,howmuchcomputer/electron,sky7sea/electron,seanchas116/electron,howmuchcomputer/electron,tomashanacek/electron,hokein/atom-shell,farmisen/electron,fritx/electron,astoilkov/electron,mubassirhayat/electron,thompsonemerson/electron,faizalpribadi/electron,Andrey-Pavlov/electron,adcentury/electron,jjz/electron,rhencke/electron,dahal/electron,RIAEvangelist/electron,rhencke/electron,farmisen/electron,jlord/electron,anko/electron,kokdemo/electron,MaxWhere/electron,roadev/electron,gabriel/electron,simongregory/electron,DivyaKMenon/electron,BionicClick/electron,electron/electron,vipulroxx/electron,cos2004/elec
tron,trigrass2/electron,seanchas116/electron,nekuz0r/electron,mhkeller/electron,twolfson/electron,gamedevsam/electron,DivyaKMenon/electron,bwiggs/electron,jlhbaseball15/electron,darwin/electron,chriskdon/electron,takashi/electron,sshiting/electron,electron/electron,mrwizard82d1/electron,bwiggs/electron,fireball-x/atom-shell,aaron-goshine/electron,faizalpribadi/electron,farmisen/electron,bruce/electron,MaxWhere/electron,roadev/electron,webmechanicx/electron,jacksondc/electron,jlhbaseball15/electron,Floato/electron,gabrielPeart/electron,IonicaBizauKitchen/electron,d-salas/electron,preco21/electron,mhkeller/electron,Rokt33r/electron,fomojola/electron,Gerhut/electron,jsutcodes/electron,pirafrank/electron,wan-qy/electron,jonatasfreitasv/electron,michaelchiche/electron,natgolov/electron,aaron-goshine/electron,pombredanne/electron,Neron-X5/electron,baiwyc119/electron,nekuz0r/electron,ankitaggarwal011/electron,aliib/electron,icattlecoder/electron,fritx/electron,ervinb/electron,jonatasfreitasv/electron,deepak1556/atom-shell,adamjgray/electron,robinvandernoord/electron,carsonmcdonald/electron,leolujuyi/electron,bwiggs/electron,leethomas/electron,nicholasess/electron,rajatsingla28/electron,Andrey-Pavlov/electron,chrisswk/electron,Floato/electron,timruffles/electron,fomojola/electron,bpasero/electron,bitemyapp/electron,gabriel/electron,vipulroxx/electron,carsonmcdonald/electron,webmechanicx/electron,GoooIce/electron,cos2004/electron,wan-qy/electron,JussMee15/electron,Neron-X5/electron,xiruibing/electron,wan-qy/electron,arusakov/electron,bobwol/electron,fritx/electron,eriser/electron,icattlecoder/electron,ianscrivener/electron,kazupon/electron,pirafrank/electron,pandoraui/electron,bobwol/electron,electron/electron,leethomas/electron,joaomoreno/atom-shell,mubassirhayat/electron,robinvandernoord/electron,ervinb/electron,preco21/electron,maxogden/atom-shell,bbondy/electron,jacksondc/electron,shennushi/electron,davazp/electron,electron/electron,bitemyapp/electron,tincan24/electron,k
ikong/electron,edulan/electron,abhishekgahlot/electron,Zagorakiss/electron,kostia/electron,sircharleswatson/electron,jlord/electron,brave/electron,timruffles/electron,jsutcodes/electron,maxogden/atom-shell,bbondy/electron,tomashanacek/electron,stevemao/electron,gerhardberger/electron,stevekinney/electron,d-salas/electron,Jacobichou/electron,sky7sea/electron,matiasinsaurralde/electron,bpasero/electron,mirrh/electron,dkfiresky/electron,JussMee15/electron,electron/electron,rhencke/electron,jcblw/electron,miniak/electron,d-salas/electron,brave/electron,rsvip/electron,d-salas/electron,farmisen/electron,subblue/electron,tylergibson/electron,etiktin/electron,trigrass2/electron,brave/electron,gabrielPeart/electron,jannishuebl/electron,ianscrivener/electron,miniak/electron,thompsonemerson/electron,IonicaBizauKitchen/electron,gabrielPeart/electron,kokdemo/electron,astoilkov/electron,michaelchiche/electron,destan/electron,leftstick/electron,jsutcodes/electron,GoooIce/electron,kikong/electron,christian-bromann/electron,MaxWhere/electron,mattdesl/electron,Ivshti/electron,Ivshti/electron,kikong/electron,astoilkov/electron,pombredanne/electron,edulan/electron,felixrieseberg/electron,mirrh/electron,jonatasfreitasv/electron,aaron-goshine/electron,Jonekee/electron,hokein/atom-shell,coderhaoxin/electron,gstack/infinium-shell,beni55/electron,adcentury/electron,kostia/electron,gamedevsam/electron,jannishuebl/electron,ankitaggarwal011/electron,egoist/electron,sky7sea/electron,natgolov/electron,ankitaggarwal011/electron,Zagorakiss/electron,felixrieseberg/electron,astoilkov/electron,vipulroxx/electron,micalan/electron,John-Lin/electron,christian-bromann/electron,d-salas/electron,synaptek/electron,beni55/electron,sky7sea/electron,dongjoon-hyun/electron,chriskdon/electron,thomsonreuters/electron,Zagorakiss/electron,jlord/electron,sshiting/electron,systembugtj/electron,synaptek/electron,arturts/electron,simongregory/electron,tinydew4/electron,fabien-d/electron,natgolov/electron,leftstick/elec
tron,xfstudio/electron,bruce/electron,kazupon/electron,simonfork/electron,pandoraui/electron,jaanus/electron,kenmozi/electron,tomashanacek/electron,rreimann/electron,jjz/electron,bbondy/electron,micalan/electron,BionicClick/electron,fomojola/electron,JesselJohn/electron,deed02392/electron,bobwol/electron,dongjoon-hyun/electron,rreimann/electron,xiruibing/electron,benweissmann/electron,arusakov/electron,kenmozi/electron,pirafrank/electron,abhishekgahlot/electron,gabriel/electron,jaanus/electron,yan-foto/electron,chriskdon/electron,stevekinney/electron,roadev/electron,wolfflow/electron,GoooIce/electron,bruce/electron,medixdev/electron,thomsonreuters/electron,Gerhut/electron,renaesop/electron,eriser/electron,joaomoreno/atom-shell,mjaniszew/electron,gabriel/electron,benweissmann/electron,fabien-d/electron,zhakui/electron,tylergibson/electron,brenca/electron,aecca/electron,webmechanicx/electron,saronwei/electron,preco21/electron,thingsinjars/electron,rhencke/electron,iftekeriba/electron,deed02392/electron,rsvip/electron,mattotodd/electron,tomashanacek/electron,joneit/electron,biblerule/UMCTelnetHub,oiledCode/electron,Rokt33r/electron,tincan24/electron,howmuchcomputer/electron,coderhaoxin/electron,jtburke/electron,mattdesl/electron,felixrieseberg/electron,xfstudio/electron,shiftkey/electron,sircharleswatson/electron,jtburke/electron,fomojola/electron,Floato/electron,tonyganch/electron,miniak/electron,mattotodd/electron,Evercoder/electron,joneit/electron,saronwei/electron,oiledCode/electron,zhakui/electron,brave/electron,deepak1556/atom-shell,nicholasess/electron,takashi/electron,kazupon/electron,synaptek/electron,lrlna/electron,meowlab/electron,Evercoder/electron,nekuz0r/electron,kikong/electron,rsvip/electron,arturts/electron,yalexx/electron,rajatsingla28/electron,jiaz/electron,aliib/electron,adamjgray/electron,kcrt/electron,subblue/electron,aaron-goshine/electron,tonyganch/electron,arturts/electron,shennushi/electron,MaxGraey/electron,RobertJGabriel/electron,aecca/elect
ron,michaelchiche/electron,nicholasess/electron,Andrey-Pavlov/electron,soulteary/electron,biblerule/UMCTelnetHub,posix4e/electron,stevemao/electron,bruce/electron,jonatasfreitasv/electron,aichingm/electron,voidbridge/electron,cqqccqc/electron,fireball-x/atom-shell,michaelchiche/electron,takashi/electron,minggo/electron,JussMee15/electron,bbondy/electron,saronwei/electron,timruffles/electron,pombredanne/electron,takashi/electron,wan-qy/electron,cqqccqc/electron,mhkeller/electron,bobwol/electron,the-ress/electron,the-ress/electron,jiaz/electron,jsutcodes/electron,yalexx/electron,brave/electron,faizalpribadi/electron,bbondy/electron,sshiting/electron,christian-bromann/electron,jcblw/electron,meowlab/electron,minggo/electron,aliib/electron,coderhaoxin/electron,arusakov/electron,Neron-X5/electron,digideskio/electron,trankmichael/electron,howmuchcomputer/electron,LadyNaggaga/electron,noikiy/electron,Jacobichou/electron,cos2004/electron,IonicaBizauKitchen/electron,Faiz7412/electron,thompsonemerson/electron,electron/electron,soulteary/electron,setzer777/electron,Rokt33r/electron,yan-foto/electron,sky7sea/electron,mjaniszew/electron,shaundunne/electron,davazp/electron,webmechanicx/electron,dahal/electron,RIAEvangelist/electron,thompsonemerson/electron,mubassirhayat/electron,matiasinsaurralde/electron,Rokt33r/electron,saronwei/electron,chriskdon/electron,digideskio/electron,posix4e/electron,anko/electron,maxogden/atom-shell,saronwei/electron,nagyistoce/electron-atom-shell,fomojola/electron,leethomas/electron,adcentury/electron,dahal/electron,micalan/electron,iftekeriba/electron,eric-seekas/electron,bwiggs/electron,tincan24/electron,Neron-X5/electron,tylergibson/electron,aaron-goshine/electron,biblerule/UMCTelnetHub,Jonekee/electron,mattdesl/electron,JesselJohn/electron,jlhbaseball15/electron,minggo/electron,soulteary/electron,tinydew4/electron,baiwyc119/electron,leolujuyi/electron,fabien-d/electron,jlhbaseball15/electron,zhakui/electron,dkfiresky/electron,noikiy/electron,kcrt
/electron,brenca/electron,trigrass2/electron,matiasinsaurralde/electron,trigrass2/electron,leolujuyi/electron,chriskdon/electron,rhencke/electron,eriser/electron,yan-foto/electron,fffej/electron,kenmozi/electron,subblue/electron,vHanda/electron,felixrieseberg/electron,chrisswk/electron,evgenyzinoviev/electron,nicobot/electron,deed02392/electron,pandoraui/electron,minggo/electron,nicobot/electron,greyhwndz/electron,tonyganch/electron,gbn972/electron,shaundunne/electron,brave/muon,medixdev/electron,wolfflow/electron,rajatsingla28/electron,kazupon/electron,jlord/electron,noikiy/electron,darwin/electron,DivyaKMenon/electron,John-Lin/electron,kcrt/electron,zhakui/electron,subblue/electron,oiledCode/electron,coderhaoxin/electron,tinydew4/electron,bobwol/electron,bright-sparks/electron,aichingm/electron,dongjoon-hyun/electron,bruce/electron,meowlab/electron,jonatasfreitasv/electron,Neron-X5/electron,cqqccqc/electron,tonyganch/electron,davazp/electron,micalan/electron,oiledCode/electron,John-Lin/electron,simonfork/electron,digideskio/electron,egoist/electron,eric-seekas/electron,joaomoreno/atom-shell,faizalpribadi/electron,nicholasess/electron,twolfson/electron,Faiz7412/electron,mattotodd/electron,greyhwndz/electron,MaxWhere/electron,xfstudio/electron,sshiting/electron,gbn972/electron,Zagorakiss/electron,davazp/electron,RIAEvangelist/electron,etiktin/electron,jaanus/electron,shiftkey/electron,rajatsingla28/electron,mirrh/electron,kokdemo/electron,egoist/electron,thompsonemerson/electron,brenca/electron,JesselJohn/electron,eric-seekas/electron,aecca/electron,GoooIce/electron,ervinb/electron,gbn972/electron,baiwyc119/electron,shockone/electron,chrisswk/electron,stevekinney/electron,jcblw/electron,shaundunne/electron,kostia/electron,dongjoon-hyun/electron,aliib/electron,preco21/electron,d-salas/electron,brave/muon,nekuz0r/electron,kenmozi/electron,lrlna/electron,tonyganch/electron,iftekeriba/electron,jannishuebl/electron,neutrous/electron,mhkeller/electron,mrwizard82d1/electro
n,gabriel/electron,Jacobichou/electron,tinydew4/electron,mattotodd/electron,stevemao/electron,ervinb/electron,kenmozi/electron,simongregory/electron,tinydew4/electron,matiasinsaurralde/electron,astoilkov/electron,jtburke/electron,LadyNaggaga/electron,lzpfmh/electron,gstack/infinium-shell,aaron-goshine/electron,medixdev/electron,leolujuyi/electron,xfstudio/electron,baiwyc119/electron,ankitaggarwal011/electron,aecca/electron,hokein/atom-shell,tincan24/electron,beni55/electron,michaelchiche/electron,pandoraui/electron,saronwei/electron,mrwizard82d1/electron,IonicaBizauKitchen/electron,the-ress/electron,soulteary/electron,smczk/electron,jlhbaseball15/electron,fffej/electron,shaundunne/electron,abhishekgahlot/electron,baiwyc119/electron,xfstudio/electron,thomsonreuters/electron,aliib/electron,MaxGraey/electron,brave/muon,aecca/electron,trankmichael/electron,sircharleswatson/electron,sshiting/electron,thingsinjars/electron,neutrous/electron,chrisswk/electron,seanchas116/electron,Andrey-Pavlov/electron,rsvip/electron,jtburke/electron,minggo/electron,thingsinjars/electron,gerhardberger/electron,vHanda/electron,stevemao/electron,vHanda/electron,lrlna/electron,subblue/electron,abhishekgahlot/electron,jjz/electron,systembugtj/electron,rhencke/electron,faizalpribadi/electron,edulan/electron,GoooIce/electron,posix4e/electron,systembugtj/electron,simonfork/electron,arusakov/electron,smczk/electron,christian-bromann/electron,smczk/electron,jsutcodes/electron,benweissmann/electron,aliib/electron,matiasinsaurralde/electron,vaginessa/electron,stevemao/electron,evgenyzinoviev/electron,wan-qy/electron,arturts/electron,yan-foto/electron,jhen0409/electron,seanchas116/electron,gstack/infinium-shell,JesselJohn/electron,farmisen/electron,RIAEvangelist/electron,egoist/electron,evgenyzinoviev/electron,nicholasess/electron,shockone/electron,aecca/electron,Rokt33r/electron,cqqccqc/electron,the-ress/electron,simongregory/electron,yalexx/electron,pandoraui/electron,adamjgray/electron,maxogden/ato
m-shell,mjaniszew/electron,bitemyapp/electron,gamedevsam/electron,brave/muon,abhishekgahlot/electron,dkfiresky/electron,JussMee15/electron,fritx/electron,digideskio/electron,joneit/electron,jacksondc/electron,felixrieseberg/electron,fireball-x/atom-shell,ianscrivener/electron,eric-seekas/electron,shockone/electron,natgolov/electron,noikiy/electron,Jacobichou/electron,cos2004/electron,wan-qy/electron,LadyNaggaga/electron,pombredanne/electron,JussMee15/electron,leftstick/electron,aichingm/electron,jannishuebl/electron,shaundunne/electron,smczk/electron,fireball-x/atom-shell,RobertJGabriel/electron,christian-bromann/electron,jiaz/electron,yalexx/electron,synaptek/electron,jsutcodes/electron,kostia/electron,sircharleswatson/electron,jaanus/electron,jlord/electron,RIAEvangelist/electron,jjz/electron,Jonekee/electron,egoist/electron,cos2004/electron,greyhwndz/electron,robinvandernoord/electron,evgenyzinoviev/electron,bwiggs/electron,rajatsingla28/electron,lzpfmh/electron,biblerule/UMCTelnetHub,benweissmann/electron,joaomoreno/atom-shell,nicobot/electron,shockone/electron,soulteary/electron,rreimann/electron,IonicaBizauKitchen/electron,synaptek/electron,jiaz/electron,benweissmann/electron,Ivshti/electron,MaxWhere/electron,posix4e/electron,Faiz7412/electron,tomashanacek/electron,bright-sparks/electron,Gerhut/electron,gbn972/electron,wolfflow/electron,Evercoder/electron,Jacobichou/electron,dahal/electron,simonfork/electron,destan/electron,sircharleswatson/electron,jjz/electron,edulan/electron,tinydew4/electron,wolfflow/electron,darwin/electron,preco21/electron,tylergibson/electron,bobwol/electron,bruce/electron,yan-foto/electron,voidbridge/electron,MaxWhere/electron,soulteary/electron,kostia/electron,setzer777/electron,dahal/electron,gabrielPeart/electron,evgenyzinoviev/electron,mattotodd/electron,eriser/electron,posix4e/electron,leolujuyi/electron,jjz/electron,bpasero/electron,shiftkey/electron,setzer777/electron,vipulroxx/electron,IonicaBizauKitchen/electron,Floato/electro
n,shennushi/electron,gbn972/electron,joneit/electron,systembugtj/electron,Andrey-Pavlov/electron,trigrass2/electron,preco21/electron,sircharleswatson/electron,fabien-d/electron,dongjoon-hyun/electron,joaomoreno/atom-shell,brave/muon,carsonmcdonald/electron,JesselJohn/electron,bright-sparks/electron,Floato/electron,lrlna/electron,lzpfmh/electron,natgolov/electron,edulan/electron,beni55/electron,leethomas/electron,mattotodd/electron,Faiz7412/electron,trankmichael/electron,stevemao/electron,oiledCode/electron,wolfflow/electron,eriser/electron,aichingm/electron,carsonmcdonald/electron,gerhardberger/electron,takashi/electron,voidbridge/electron,adcentury/electron,deed02392/electron,BionicClick/electron,davazp/electron,thingsinjars/electron,gbn972/electron,Evercoder/electron,iftekeriba/electron,miniak/electron,xiruibing/electron,etiktin/electron,eric-seekas/electron,Floato/electron,twolfson/electron,brenca/electron,lzpfmh/electron,leethomas/electron,trankmichael/electron,hokein/atom-shell,shockone/electron,destan/electron,fireball-x/atom-shell,dkfiresky/electron,icattlecoder/electron,jhen0409/electron,deepak1556/atom-shell,trankmichael/electron,iftekeriba/electron,neutrous/electron,sshiting/electron,etiktin/electron,RobertJGabriel/electron,kcrt/electron,evgenyzinoviev/electron,pombredanne/electron,renaesop/electron,chrisswk/electron,simongregory/electron,fritx/electron,adcentury/electron,dongjoon-hyun/electron,mirrh/electron,mrwizard82d1/electron,meowlab/electron,baiwyc119/electron,tylergibson/electron,lrlna/electron,miniak/electron,gstack/infinium-shell,bitemyapp/electron,shennushi/electron,neutrous/electron,destan/electron,shiftkey/electron,pandoraui/electron,shiftkey/electron,robinvandernoord/electron,fffej/electron,jhen0409/electron,mubassirhayat/electron,roadev/electron,mattdesl/electron,jannishuebl/electron,Evercoder/electron,mjaniszew/electron,subblue/electron,Gerhut/electron,voidbridge/electron,adamjgray/electron,trankmichael/electron,jhen0409/electron,nicobot/ele
ctron,kenmozi/electron,bitemyapp/electron,seanchas116/electron,jiaz/electron,John-Lin/electron,tomashanacek/electron,jacksondc/electron,Gerhut/electron,yalexx/electron,stevekinney/electron,rreimann/electron,deed02392/electron,fabien-d/electron,renaesop/electron,meowlab/electron,matiasinsaurralde/electron,bwiggs/electron,the-ress/electron,joneit/electron,xiruibing/electron,etiktin/electron,twolfson/electron,bitemyapp/electron,GoooIce/electron
|
---
+++
@@ -4,7 +4,7 @@
import sys
BASE_URL = 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
-LIBCHROMIUMCONTENT_COMMIT = 'e375124044f9044ac88076eba0cd17361ee0997c'
+LIBCHROMIUMCONTENT_COMMIT = '55efd338101e08691560192b2be0f9c3b1b0eb72'
ARCH = {
'cygwin': '32bit',
|
b8839302c0a4d8ada99a695f8829027fa433e05e
|
zerver/migrations/0232_make_archive_transaction_field_not_nullable.py
|
zerver/migrations/0232_make_archive_transaction_field_not_nullable.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """
    Tables cannot have data deleted from them and be altered in a single
    transaction, but the DELETEs need to be atomic together.  So the
    migration runs with atomic=False overall, the DELETEs run inside one
    explicit transaction, and the AlterField runs in another.
    """
    atomic = False
    dependencies = [
        ('zerver', '0231_add_archive_transaction_model'),
    ]
    operations = [
        # Child tables are cleared before their parents so foreign-key
        # constraints are never violated; in particular the M2M table
        # zerver_archivedattachment_messages must be emptied before
        # zerver_archivedattachment.
        migrations.RunSQL("""
        BEGIN;
        DELETE FROM zerver_archivedusermessage;
        DELETE FROM zerver_archivedreaction;
        DELETE FROM zerver_archivedsubmessage;
        DELETE FROM zerver_archivedattachment_messages;
        DELETE FROM zerver_archivedattachment;
        DELETE FROM zerver_archivedmessage;
        DELETE FROM zerver_archivetransaction;
        COMMIT;
        """),
        # With the tables emptied, the FK can be made non-nullable.
        migrations.AlterField(
            model_name='archivedmessage',
            name='archive_transaction',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.ArchiveTransaction'),
        ),
    ]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """
    Tables cannot have data deleted from them and be altered in a single transaction,
    but we need the DELETEs to be atomic together. So we set atomic=False for the migration
    in general, and run the DELETEs in one transaction, and AlterField in another.
    """
    atomic = False
    dependencies = [
        ('zerver', '0231_add_archive_transaction_model'),
    ]
    operations = [
        # Child tables are cleared before their parents so foreign-key
        # constraints are never violated; in particular the M2M table
        # zerver_archivedattachment_messages is emptied before
        # zerver_archivedattachment.
        migrations.RunSQL("""
        BEGIN;
        DELETE FROM zerver_archivedusermessage;
        DELETE FROM zerver_archivedreaction;
        DELETE FROM zerver_archivedsubmessage;
        DELETE FROM zerver_archivedattachment_messages;
        DELETE FROM zerver_archivedattachment;
        DELETE FROM zerver_archivedmessage;
        DELETE FROM zerver_archivetransaction;
        COMMIT;
        """),
        # With the tables emptied, the FK can be made non-nullable.
        migrations.AlterField(
            model_name='archivedmessage',
            name='archive_transaction',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.ArchiveTransaction'),
        ),
    ]
|
Fix migration making archive_transaction field not null.
|
retention: Fix migration making archive_transaction field not null.
DELETing from archive tables and ALTERing ArchivedMessage needs to be
split into separate transactions.
zerver_archivedattachment_messages needs to be cleared out before
zerver_archivedattachment.
|
Python
|
apache-2.0
|
eeshangarg/zulip,shubhamdhama/zulip,zulip/zulip,brainwane/zulip,synicalsyntax/zulip,eeshangarg/zulip,andersk/zulip,hackerkid/zulip,hackerkid/zulip,timabbott/zulip,zulip/zulip,timabbott/zulip,synicalsyntax/zulip,tommyip/zulip,tommyip/zulip,rht/zulip,andersk/zulip,rishig/zulip,rht/zulip,timabbott/zulip,brainwane/zulip,eeshangarg/zulip,showell/zulip,rht/zulip,showell/zulip,andersk/zulip,tommyip/zulip,showell/zulip,showell/zulip,synicalsyntax/zulip,hackerkid/zulip,punchagan/zulip,shubhamdhama/zulip,rishig/zulip,brainwane/zulip,andersk/zulip,rht/zulip,brainwane/zulip,brainwane/zulip,shubhamdhama/zulip,eeshangarg/zulip,timabbott/zulip,rishig/zulip,punchagan/zulip,zulip/zulip,zulip/zulip,showell/zulip,kou/zulip,synicalsyntax/zulip,showell/zulip,timabbott/zulip,andersk/zulip,rishig/zulip,timabbott/zulip,kou/zulip,kou/zulip,brainwane/zulip,rishig/zulip,kou/zulip,eeshangarg/zulip,tommyip/zulip,showell/zulip,tommyip/zulip,shubhamdhama/zulip,punchagan/zulip,synicalsyntax/zulip,rishig/zulip,shubhamdhama/zulip,zulip/zulip,kou/zulip,shubhamdhama/zulip,eeshangarg/zulip,andersk/zulip,andersk/zulip,synicalsyntax/zulip,tommyip/zulip,shubhamdhama/zulip,kou/zulip,punchagan/zulip,rishig/zulip,tommyip/zulip,kou/zulip,hackerkid/zulip,punchagan/zulip,rht/zulip,hackerkid/zulip,rht/zulip,timabbott/zulip,punchagan/zulip,rht/zulip,synicalsyntax/zulip,brainwane/zulip,punchagan/zulip,hackerkid/zulip,hackerkid/zulip,eeshangarg/zulip,zulip/zulip,zulip/zulip
|
---
+++
@@ -4,21 +4,30 @@
from django.db import migrations, models
import django.db.models.deletion
-
class Migration(migrations.Migration):
+ """
+ Tables cannot have data deleted from them and be altered in a single transaction,
+ but we need the DELETEs to be atomic together. So we set atomic=False for the migration
+ in general, and run the DELETEs in one transaction, and AlterField in another.
+ """
+ atomic = False
dependencies = [
('zerver', '0231_add_archive_transaction_model'),
]
operations = [
- migrations.RunSQL("DELETE FROM zerver_archivedusermessage"),
- migrations.RunSQL("DELETE FROM zerver_archivedreaction"),
- migrations.RunSQL("DELETE FROM zerver_archivedsubmessage"),
- migrations.RunSQL("DELETE FROM zerver_archivedattachment"),
- migrations.RunSQL("DELETE FROM zerver_archivedattachment_messages"),
- migrations.RunSQL("DELETE FROM zerver_archivedmessage"),
- migrations.RunSQL("DELETE FROM zerver_archivetransaction"),
+ migrations.RunSQL("""
+ BEGIN;
+ DELETE FROM zerver_archivedusermessage;
+ DELETE FROM zerver_archivedreaction;
+ DELETE FROM zerver_archivedsubmessage;
+ DELETE FROM zerver_archivedattachment_messages;
+ DELETE FROM zerver_archivedattachment;
+ DELETE FROM zerver_archivedmessage;
+ DELETE FROM zerver_archivetransaction;
+ COMMIT;
+ """),
migrations.AlterField(
model_name='archivedmessage',
name='archive_transaction',
|
a8974f140158d27ff2b3c6cf0d38829109244bed
|
future/builtins/__init__.py
|
future/builtins/__init__.py
|
"""
A module that brings in equivalents of the new and modified Python 3
builtins into Py2. Has no effect on Py3.
See the docs for these modules for more information::
- future.builtins.iterators
- future.builtins.backports
- future.builtins.misc
- future.builtins.disabled
"""
from future.builtins.iterators import (filter, map, zip)
from future.builtins.misc import (ascii, chr, hex, input, int, oct, open)
from future.builtins.backports import (bytes, range, round, str, super)
from future import utils
if not utils.PY3:
# We only import names that shadow the builtins on Py2. No other namespace
# pollution on Py2.
# Only shadow builtins on Py2; no new names
__all__ = ['filter', 'map', 'zip',
'ascii', 'chr', 'hex', 'input', 'oct', 'open',
'bytes', 'int', 'range', 'round', 'str', 'super',
]
else:
# No namespace pollution on Py3
__all__ = []
# TODO: add 'callable' for Py3.0 and Py3.1?
|
"""
A module that brings in equivalents of the new and modified Python 3
builtins into Py2. Has no effect on Py3.
See the docs for these modules for more information::
- future.builtins.iterators
- future.builtins.backports
- future.builtins.misc
- future.builtins.disabled
"""
from future.builtins.iterators import (filter, map, zip)
from future.builtins.misc import (ascii, chr, hex, input, oct, open)
from future.builtins.backports import (bytes, int, range, round, str, super)
from future import utils
if not utils.PY3:
# We only import names that shadow the builtins on Py2. No other namespace
# pollution on Py2.
# Only shadow builtins on Py2; no new names
__all__ = ['filter', 'map', 'zip',
'ascii', 'chr', 'hex', 'input', 'oct', 'open',
'bytes', 'int', 'range', 'round', 'str', 'super',
]
else:
# No namespace pollution on Py3
__all__ = []
# TODO: add 'callable' for Py3.0 and Py3.1?
|
Enable backported ``int`` in future.builtins
|
Enable backported ``int`` in future.builtins
|
Python
|
mit
|
michaelpacer/python-future,krischer/python-future,PythonCharmers/python-future,krischer/python-future,PythonCharmers/python-future,QuLogic/python-future,michaelpacer/python-future,QuLogic/python-future
|
---
+++
@@ -12,8 +12,8 @@
"""
from future.builtins.iterators import (filter, map, zip)
-from future.builtins.misc import (ascii, chr, hex, input, int, oct, open)
-from future.builtins.backports import (bytes, range, round, str, super)
+from future.builtins.misc import (ascii, chr, hex, input, oct, open)
+from future.builtins.backports import (bytes, int, range, round, str, super)
from future import utils
if not utils.PY3:
|
b2c90df01d86488b1dece173b6bb6b0afa0fbdcf
|
src/binder/db_sqlite.py
|
src/binder/db_sqlite.py
|
import sqlite3
from binder.conn import Connection
from binder.sqlgen import DIALECT_SQLITE
class SqliteConnection(Connection):
    """A binder Connection backed by the standard-library sqlite3 driver.

    Args:
        dbfile: path to the SQLite database file; sqlite3.connect creates
            it if it does not exist.
        read_only: forwarded to Connection (presumably disables writes —
            confirm in binder.conn).  Defaults to False so callers that
            want a normal read/write connection can omit it; existing
            positional callers are unaffected.
    """
    def __init__(self, dbfile, read_only=False):
        dbconn = sqlite3.connect(dbfile)
        dberror = sqlite3.Error
        Connection.__init__(
            self, dbconn, dberror,
            DIALECT_SQLITE, "?",
            read_only
            )
|
import sqlite3
from binder.conn import Connection
from binder.sqlgen import DIALECT_SQLITE
class SqliteConnection(Connection):
    """A binder Connection backed by the standard-library sqlite3 driver."""

    def __init__(self, dbfile, read_only=False):
        # Open the database file (sqlite3.connect creates it if missing)
        # and hand the raw DB-API connection plus the driver's error
        # class straight to the base class.
        Connection.__init__(
            self,
            sqlite3.connect(dbfile),
            sqlite3.Error,
            DIALECT_SQLITE,
            "?",
            read_only,
        )
|
Make SqliteConnection read_only parameter optional
|
Make SqliteConnection read_only parameter optional
|
Python
|
mit
|
divtxt/binder
|
---
+++
@@ -6,7 +6,7 @@
class SqliteConnection(Connection):
- def __init__(self, dbfile, read_only):
+ def __init__(self, dbfile, read_only=False):
dbconn = sqlite3.connect(dbfile)
dberror = sqlite3.Error
Connection.__init__(
|
2a7322104eb4222517f8a1167597104c5daab0bc
|
notes-cli.py
|
notes-cli.py
|
import argparse
import yaml
from os.path import expanduser
def load_config_from(path):
with open(expanduser(path)) as file:
return yaml.load(file)
def parse_options():
parser = argparse.ArgumentParser()
parser.add_argument("command",
choices=["ls", "add", "rm", "edit", "view", "reindex"])
parser.add_argument("--query")
return parser.parse_args()
def main():
config = load_config_from("~/.notes-cli/config.yaml")
options = parse_options()
print options
if __name__ == "__main__":
main()
|
import argparse
import yaml
import os
from os.path import expanduser, isdir
import whoosh.index as ix
from whoosh.fields import *
def load_config_from(path):
with open(expanduser(path)) as file:
return yaml.load(file)
def parse_options():
parser = argparse.ArgumentParser()
parser.add_argument("command",
choices=["ls", "add", "rm", "edit", "view", "reindex"])
parser.add_argument("--query")
return parser.parse_args()
def create_or_load_index(index_path):
index_full_path = expanduser(index_path)
if isdir(index_full_path):
return ix.open_dir(index_full_path)
else:
os.mkdir(index_full_path)
schema = Schema(filename=TEXT(stored=True), content=TEXT)
return ix.create_in(index_full_path, schema)
def main():
config = load_config_from("~/.notes-cli/config.yaml")
options = parse_options()
index = create_or_load_index(config["indexdir"])
print options
if __name__ == "__main__":
main()
|
Create or load index directory
|
Create or load index directory
|
Python
|
mit
|
phss/notes-cli
|
---
+++
@@ -1,6 +1,9 @@
import argparse
import yaml
-from os.path import expanduser
+import os
+from os.path import expanduser, isdir
+import whoosh.index as ix
+from whoosh.fields import *
def load_config_from(path):
@@ -14,9 +17,19 @@
parser.add_argument("--query")
return parser.parse_args()
+def create_or_load_index(index_path):
+ index_full_path = expanduser(index_path)
+ if isdir(index_full_path):
+ return ix.open_dir(index_full_path)
+ else:
+ os.mkdir(index_full_path)
+ schema = Schema(filename=TEXT(stored=True), content=TEXT)
+ return ix.create_in(index_full_path, schema)
+
def main():
config = load_config_from("~/.notes-cli/config.yaml")
options = parse_options()
+ index = create_or_load_index(config["indexdir"])
print options
if __name__ == "__main__":
|
8b0c1fc8d06a4561b9241a92162a24a4df0efa34
|
viper.py
|
viper.py
|
#!/usr/bin/env python3
from viper.interactive import *
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-L', '--interactive-lexer', action='store_true', help='lexes input')
parser.add_argument('-S', '--interactive-sppf', action='store_true', help='lexes input and produces SPPF')
parser.add_argument('-r', '--grammar-rule', default='single_line', help='grammar rule from which to start parsing')
args = parser.parse_args()
if args.interactive_lexer:
InteractiveLexer().cmdloop()
elif args.interactive_sppf:
InteractiveSPPF(args.grammar_rule).cmdloop()
|
#!/usr/bin/env python3
from viper.interactive import *
from viper.lexer import lex_file
from viper.grammar import GRAMMAR
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-L', '--interactive-lexer', action='store_true', help='lexes input')
parser.add_argument('-S', '--interactive-sppf', action='store_true', help='lexes input and produces SPPF')
parser.add_argument('-s', '--file-sppf', help='produces SPPF for given input')
parser.add_argument('-r', '--grammar-rule', default='single_line', help='grammar rule from which to start parsing')
args = parser.parse_args()
if args.interactive_lexer:
InteractiveLexer().cmdloop()
elif args.interactive_sppf:
InteractiveSPPF(args.grammar_rule).cmdloop()
elif args.file_sppf:
lexemes = lex_file(args.file_sppf)
sppf = GRAMMAR.parse_multiple(lexemes)
print(sppf)
|
Allow direct SPPF generation from interactive script
|
Allow direct SPPF generation from interactive script
|
Python
|
apache-2.0
|
pdarragh/Viper
|
---
+++
@@ -1,6 +1,8 @@
#!/usr/bin/env python3
from viper.interactive import *
+from viper.lexer import lex_file
+from viper.grammar import GRAMMAR
if __name__ == '__main__':
@@ -8,6 +10,7 @@
parser = argparse.ArgumentParser()
parser.add_argument('-L', '--interactive-lexer', action='store_true', help='lexes input')
parser.add_argument('-S', '--interactive-sppf', action='store_true', help='lexes input and produces SPPF')
+ parser.add_argument('-s', '--file-sppf', help='produces SPPF for given input')
parser.add_argument('-r', '--grammar-rule', default='single_line', help='grammar rule from which to start parsing')
args = parser.parse_args()
@@ -15,3 +18,7 @@
InteractiveLexer().cmdloop()
elif args.interactive_sppf:
InteractiveSPPF(args.grammar_rule).cmdloop()
+ elif args.file_sppf:
+ lexemes = lex_file(args.file_sppf)
+ sppf = GRAMMAR.parse_multiple(lexemes)
+ print(sppf)
|
3598313c087651a85dce5e31d9fdc227dea0ccf4
|
binary-search.py
|
binary-search.py
|
# iterative approach to binary search function (assume list has distinct elements and elements are in ascending order)
def binary_search(arr, data):
low = 0 # first element in array
high = len(arr) - 1 # last item in array
while low <= high: # iterate through "entire" array
middle = (low + high)/2
if arr[middle] == data:
return middle
elif arr[middle] < data:
low = middle + 1 # narrow down search to upper half
else:
high = middle - 1 # narrow down search to bottom half
return -1 # data not in array
|
# iterative approach to binary search function (assume list has distinct elements and elements are in ascending order)
def binary_search(arr, data):
low = 0 # first element position in array
high = len(arr) - 1 # last element position in array
while low <= high: # iterate through "entire" array
middle = (low + high)/2
if arr[middle] == data:
return middle
elif arr[middle] < data:
low = middle + 1 # narrow down search to upper half
else:
high = middle - 1 # narrow down search to bottom half
return -1 # data not in array
# test cases
test = [1, 4, 5, 7, 8, 9, 11, 17, 19, 26, 32, 35, 36]
data_one = 11
data_two = 4
data_three = 35
data_four = 27
data_five = 38
print binary_search(test, data_one) # prints 6
print binary_search(test, data_two) # prints 1
print binary_search(test, data_three) # prints 11
print binary_search(test, data_four) # prints -1
print binary_search(test, data_five) # prints -1
|
Add test cases for python implementation of binary search function
|
Add test cases for python implementation of binary search function
|
Python
|
mit
|
derekmpham/interview-prep,derekmpham/interview-prep
|
---
+++
@@ -1,8 +1,8 @@
# iterative approach to binary search function (assume list has distinct elements and elements are in ascending order)
def binary_search(arr, data):
- low = 0 # first element in array
- high = len(arr) - 1 # last item in array
+ low = 0 # first element position in array
+ high = len(arr) - 1 # last element position in array
while low <= high: # iterate through "entire" array
middle = (low + high)/2
if arr[middle] == data:
@@ -12,3 +12,18 @@
else:
high = middle - 1 # narrow down search to bottom half
return -1 # data not in array
+
+
+# test cases
+test = [1, 4, 5, 7, 8, 9, 11, 17, 19, 26, 32, 35, 36]
+data_one = 11
+data_two = 4
+data_three = 35
+data_four = 27
+data_five = 38
+
+print binary_search(test, data_one) # prints 6
+print binary_search(test, data_two) # prints 1
+print binary_search(test, data_three) # prints 11
+print binary_search(test, data_four) # prints -1
+print binary_search(test, data_five) # prints -1
|
1468d6e257d4f22f803549606cbd3e3245c2ce37
|
redash/utils/comparators.py
|
redash/utils/comparators.py
|
from sqlalchemy import func
from sqlalchemy.ext.hybrid import Comparator
class CaseInsensitiveComparator(Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
|
from sqlalchemy import String
class CaseInsensitiveComparator(String.Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
|
Change CaseInsensitiveComparator to support all operations.
|
Change CaseInsensitiveComparator to support all operations.
|
Python
|
bsd-2-clause
|
moritz9/redash,44px/redash,getredash/redash,alexanderlz/redash,alexanderlz/redash,moritz9/redash,44px/redash,44px/redash,denisov-vlad/redash,getredash/redash,getredash/redash,denisov-vlad/redash,getredash/redash,denisov-vlad/redash,chriszs/redash,denisov-vlad/redash,44px/redash,alexanderlz/redash,getredash/redash,chriszs/redash,alexanderlz/redash,moritz9/redash,denisov-vlad/redash,chriszs/redash,chriszs/redash,moritz9/redash
|
---
+++
@@ -1,7 +1,6 @@
-from sqlalchemy import func
-from sqlalchemy.ext.hybrid import Comparator
+from sqlalchemy import String
-class CaseInsensitiveComparator(Comparator):
+class CaseInsensitiveComparator(String.Comparator):
def __eq__(self, other):
return func.lower(self.__clause_element__()) == func.lower(other)
|
566e68d3b06d3aaba796f0cda57a42bb12cf6d79
|
celery/__init__.py
|
celery/__init__.py
|
"""Distributed Task Queue"""
from celery.distmeta import __version__, __author__, __contact__
from celery.distmeta import __homepage__, __docformat__
from celery.distmeta import VERSION, is_stable_release, version_with_meta
|
"""Distributed Task Queue"""
from celery.distmeta import __version__, __author__, __contact__
from celery.distmeta import __homepage__, __docformat__
from celery.distmeta import VERSION, is_stable_release, version_with_meta
from celery.decorators import task
from celery.task.base import Task, PeriodicTask
from celery.execute import apply_async, apply
|
Make decorators.task, Task, PeriodicTask, apply* available from celery.py
|
Make decorators.task, Task, PeriodicTask, apply* available from celery.py
|
Python
|
bsd-3-clause
|
cbrepo/celery,WoLpH/celery,ask/celery,mitsuhiko/celery,cbrepo/celery,mitsuhiko/celery,frac/celery,frac/celery,ask/celery,WoLpH/celery
|
---
+++
@@ -2,3 +2,7 @@
from celery.distmeta import __version__, __author__, __contact__
from celery.distmeta import __homepage__, __docformat__
from celery.distmeta import VERSION, is_stable_release, version_with_meta
+
+from celery.decorators import task
+from celery.task.base import Task, PeriodicTask
+from celery.execute import apply_async, apply
|
3f87d22624a5a27fdca2f82103aff6bda6491c70
|
caribou/antler/antler_settings.py
|
caribou/antler/antler_settings.py
|
from caribou.settings.setting_types import *
from caribou.i18n import _
AntlerSettings = SettingsTopGroup(
_("Antler Preferences"), "/org/gnome/antler/", "org.gnome.antler",
[SettingsGroup("antler", _("Antler"), [
SettingsGroup("appearance", _("Appearance"), [
StringSetting(
"keyboard_type", _("Keyboard Type"), "touch",
_("The keyboard geometery Caribou should use"),
_("The keyboard geometery determines the shape "
"and complexity of the keyboard, it could range from "
"a 'natural' look and feel good for composing simple "
"text, to a fullscale keyboard."),
allowed=[(('touch'), _('Touch')),
(('scan'), _('Scan'))]),
BooleanSetting("use_system", _("Use System Theme"),
True, _("Use System Theme")),
FloatSetting("min_alpha", _("Minimum Alpha"),
0.2, _("Minimal opacity of keyboard"),
min=0.0, max=1.0),
FloatSetting("max_alpha", _("Maximum Alpha"),
1.0, _("Maximal opacity of keyboard"),
min=0.0, max=1.0),
IntegerSetting("max_distance", _("Maximum Distance"),
100, _("Maximum distance when keyboard is hidden"),
min=0, max=1024)
])
])
])
|
from caribou.settings.setting_types import *
from caribou.i18n import _
AntlerSettings = SettingsTopGroup(
_("Antler Preferences"), "/org/gnome/antler/", "org.gnome.antler",
[SettingsGroup("antler", _("Antler"), [
SettingsGroup("appearance", _("Appearance"), [
StringSetting(
"keyboard_type", _("Keyboard Type"), "touch",
_("The keyboard geometery Caribou should use"),
_("The keyboard geometery determines the shape "
"and complexity of the keyboard, it could range from "
"a 'natural' look and feel good for composing simple "
"text, to a fullscale keyboard."),
allowed=[(('touch'), _('Touch')),
(('fullscale'), _('Full scale')),
(('scan'), _('Scan'))]),
BooleanSetting("use_system", _("Use System Theme"),
True, _("Use System Theme")),
FloatSetting("min_alpha", _("Minimum Alpha"),
0.2, _("Minimal opacity of keyboard"),
min=0.0, max=1.0),
FloatSetting("max_alpha", _("Maximum Alpha"),
1.0, _("Maximal opacity of keyboard"),
min=0.0, max=1.0),
IntegerSetting("max_distance", _("Maximum Distance"),
100, _("Maximum distance when keyboard is hidden"),
min=0, max=1024)
])
])
])
|
Add "full scale" to antler settings.
|
Add "full scale" to antler settings.
|
Python
|
lgpl-2.1
|
GNOME/caribou,GNOME/caribou,GNOME/caribou
|
---
+++
@@ -13,6 +13,7 @@
"a 'natural' look and feel good for composing simple "
"text, to a fullscale keyboard."),
allowed=[(('touch'), _('Touch')),
+ (('fullscale'), _('Full scale')),
(('scan'), _('Scan'))]),
BooleanSetting("use_system", _("Use System Theme"),
True, _("Use System Theme")),
|
66602e67c06266735b58fd2bee8b55b7cac401b1
|
archive/archive_report_ingest_status/src/test_archive_report_ingest_status.py
|
archive/archive_report_ingest_status/src/test_archive_report_ingest_status.py
|
# -*- encoding: utf-8 -*-
import uuid
import archive_report_ingest_status as report_ingest_status
def test_get_returns_status(dynamodb_resource, table_name):
guid = str(uuid.uuid4())
table = dynamodb_resource.Table(table_name)
table.put_item(Item={'id': guid})
event = {
'request_method': 'GET',
'id': guid
}
response = report_ingest_status.main(
event=event,
dynamodb_resource=dynamodb_resource
)
assert response['id'] == guid
def test_get_includes_other_dynamodb_metadata(dynamodb_resource, table_name):
guid = str(uuid.uuid4())
item = {'id': guid, 'fooKey': 'barValue'}
table = dynamodb_resource.Table(table_name)
table.put_item(Item=item)
event = {
'request_method': 'GET',
'id': guid
}
response = report_ingest_status.main(
event=event,
dynamodb_resource=dynamodb_resource
)
assert response == item
|
# -*- encoding: utf-8 -*-
import uuid
import pytest
import archive_report_ingest_status as report_ingest_status
def test_get_returns_status(dynamodb_resource, table_name):
guid = str(uuid.uuid4())
table = dynamodb_resource.Table(table_name)
table.put_item(Item={'id': guid})
event = {
'request_method': 'GET',
'id': guid
}
response = report_ingest_status.main(
event=event,
dynamodb_resource=dynamodb_resource
)
assert response['id'] == guid
def test_get_includes_other_dynamodb_metadata(dynamodb_resource, table_name):
guid = str(uuid.uuid4())
item = {'id': guid, 'fooKey': 'barValue'}
table = dynamodb_resource.Table(table_name)
table.put_item(Item=item)
event = {
'request_method': 'GET',
'id': guid
}
response = report_ingest_status.main(
event=event,
dynamodb_resource=dynamodb_resource
)
assert response == item
def test_fails_if_called_with_post_event():
event = {
'request_method': 'POST'
}
with pytest.raises(AssertionError, match='Expected request_method=GET'):
report_ingest_status.main(event=event)
|
Add a test that a non-GET method is rejected
|
Add a test that a non-GET method is rejected
|
Python
|
mit
|
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
|
---
+++
@@ -1,6 +1,8 @@
# -*- encoding: utf-8 -*-
import uuid
+
+import pytest
import archive_report_ingest_status as report_ingest_status
@@ -40,3 +42,12 @@
dynamodb_resource=dynamodb_resource
)
assert response == item
+
+
+def test_fails_if_called_with_post_event():
+ event = {
+ 'request_method': 'POST'
+ }
+
+ with pytest.raises(AssertionError, match='Expected request_method=GET'):
+ report_ingest_status.main(event=event)
|
342c1c70e2fbf13bea87792b6a289f830fb95692
|
letsencryptae/models.py
|
letsencryptae/models.py
|
# THIRD PARTY
from djangae.fields import CharField
from django.db import models
class Secret(models.Model):
created = models.DateTimeField(auto_now_add=True)
url_slug = CharField(primary_key=True)
secret = CharField()
def __unicode__(self):
return self.url_slug
def clean(self, *args, **kwargs):
return_value = super(Secret, self).clean(*args, **kwargs)
if not self.secret.startswith(self.url_slug):
raise ValidationError("The URL slug and the beginning of the secret should be the same.")
|
# THIRD PARTY
from djangae.fields import CharField
from django.db import models
class Secret(models.Model):
created = models.DateTimeField(auto_now_add=True)
url_slug = CharField(primary_key=True)
secret = CharField()
class Meta(object):
ordering = ('-created',)
def __unicode__(self):
return self.url_slug
def clean(self, *args, **kwargs):
return_value = super(Secret, self).clean(*args, **kwargs)
if not self.secret.startswith(self.url_slug):
raise ValidationError("The URL slug and the beginning of the secret should be the same.")
|
Order Secret objects by `created` date.
|
Order Secret objects by `created` date.
|
Python
|
mit
|
adamalton/letsencrypt-appengine
|
---
+++
@@ -8,6 +8,9 @@
url_slug = CharField(primary_key=True)
secret = CharField()
+ class Meta(object):
+ ordering = ('-created',)
+
def __unicode__(self):
return self.url_slug
|
a827279098ab2ef73778b15a76f738fedce9ed30
|
tests.py
|
tests.py
|
import api
import unittest
import os
from getpass import getpass
class ApiTest(unittest.TestCase):
def setUp(self):
self.linode = api.Api(os.environ['LINODE_API_KEY'])
def testAvailLinodeplans(self):
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
unittest.main()
|
import api
import unittest
import os
from getpass import getpass
class ApiTest(unittest.TestCase):
def setUp(self):
self.linode = api.Api(os.environ['LINODE_API_KEY'])
def testAvailLinodeplans(self):
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
def testEcho(self):
test_parameters = {'FOO': 'bar', 'FIZZ': 'buzz'}
response = self.linode.test_echo(**test_parameters)
self.assertTrue('FOO' in response)
self.assertTrue('FIZZ' in response)
self.assertEqual(test_parameters['FOO'], response['FOO'])
self.assertEqual(test_parameters['FIZZ'], response['FIZZ'])
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
unittest.main()
|
Add a test case for test.echo
|
Add a test case for test.echo
|
Python
|
mit
|
ryanshawty/linode-python,tjfontaine/linode-python
|
---
+++
@@ -12,6 +12,14 @@
available_plans = self.linode.avail_linodeplans()
self.assertTrue(isinstance(available_plans, list))
+ def testEcho(self):
+ test_parameters = {'FOO': 'bar', 'FIZZ': 'buzz'}
+ response = self.linode.test_echo(**test_parameters)
+ self.assertTrue('FOO' in response)
+ self.assertTrue('FIZZ' in response)
+ self.assertEqual(test_parameters['FOO'], response['FOO'])
+ self.assertEqual(test_parameters['FIZZ'], response['FIZZ'])
+
if __name__ == "__main__":
if 'LINODE_API_KEY' not in os.environ:
os.environ['LINODE_API_KEY'] = getpass('Enter API Key: ')
|
08da651286cf7ac8459a059a01831f75c08f80a7
|
alltests.py
|
alltests.py
|
#!/usr/bin/env python
# Run all tests
import os
import subprocess
import sys
topdir = os.path.dirname(os.path.realpath(os.path.abspath(__file__)))
os.chdir(topdir)
retcode = subprocess.call("nosetests")
if not retcode:
os.chdir(os.path.join(topdir, "docs"))
subprocess.call("make", "doctest")
|
#!/usr/bin/env python
# Run all tests
import os
import subprocess
import sys
topdir = os.path.dirname(os.path.realpath(os.path.abspath(__file__)))
os.chdir(topdir)
retcode = subprocess.call(["nosetests"])
if not retcode:
os.chdir(os.path.join(topdir, "docs"))
subprocess.call(["make", "doctest"])
|
Fix invocation of subprocess.call to pass arguments as a list - makes doctests run again.
|
Fix invocation of subprocess.call to pass arguments as a list - makes doctests run again.
|
Python
|
mit
|
restpose/restpose-py,restpose/restpose-py
|
---
+++
@@ -7,7 +7,7 @@
topdir = os.path.dirname(os.path.realpath(os.path.abspath(__file__)))
os.chdir(topdir)
-retcode = subprocess.call("nosetests")
+retcode = subprocess.call(["nosetests"])
if not retcode:
os.chdir(os.path.join(topdir, "docs"))
- subprocess.call("make", "doctest")
+ subprocess.call(["make", "doctest"])
|
2fcd13435d04622c7ec0915a77efb390ea9c09b1
|
rcstreamlistener.py
|
rcstreamlistener.py
|
# rcstreamlistener.py
import urllib3.contrib.pyopenssl
import logging
from ipaddress import ip_address
from socketIO_client import SocketIO, BaseNamespace
urllib3.contrib.pyopenssl.inject_into_urllib3()
logging.basicConfig(level=logging.WARNING)
class MainNamespace(BaseNamespace):
def on_change(self, change):
if change['namespace'] == 3:
strippedTitle = change['title'].lstrip('User talk:')
try:
ipAddressObject = ip_address(strippedTitle)
print 'True'
except ValueError:
print 'False'
def on_connect(self):
self.emit('subscribe', 'en.wikipedia.org')
print 'Connected.'
print 'Connecting...'
socketIO = SocketIO('https://stream.wikimedia.org')
socketIO.define(MainNamespace, '/rc')
socketIO.wait()
|
# rcstreamlistener.py
import urllib3.contrib.pyopenssl
import logging
from ipaddress import ip_address
from socketIO_client import SocketIO, BaseNamespace
import template_adder
urllib3.contrib.pyopenssl.inject_into_urllib3()
logging.basicConfig(level=logging.WARNING)
class MainNamespace(BaseNamespace):
def on_change(self, change):
if change['namespace'] == 3:
strippedTitle = change['title'].lstrip('User talk:')
if ip_address(strippedTitle):
print 'True'
else:
print 'False'
def on_connect(self):
self.emit('subscribe', 'en.wikipedia.org')
print 'Connected.'
print 'Connecting...'
socketIO = SocketIO('https://stream.wikimedia.org')
socketIO.define(MainNamespace, '/rc')
socketIO.wait()
|
Use if/else instead of try/except
|
Use if/else instead of try/except
|
Python
|
mit
|
piagetbot/enwikibot
|
---
+++
@@ -3,6 +3,7 @@
import logging
from ipaddress import ip_address
from socketIO_client import SocketIO, BaseNamespace
+import template_adder
urllib3.contrib.pyopenssl.inject_into_urllib3()
logging.basicConfig(level=logging.WARNING)
@@ -11,10 +12,9 @@
def on_change(self, change):
if change['namespace'] == 3:
strippedTitle = change['title'].lstrip('User talk:')
- try:
- ipAddressObject = ip_address(strippedTitle)
+ if ip_address(strippedTitle):
print 'True'
- except ValueError:
+ else:
print 'False'
def on_connect(self):
|
7f1109d38f9bc2f973410f071c97ad874dd6cb0d
|
minicps/topology.py
|
minicps/topology.py
|
"""
Recreate the SWaT network with the highest level of precision.
"""
from mininet.net import Mininet
from mininet.topo import Topo
from minicps import constants as c
class EthRing(Topo):
"""Docstring for EthRing. """
def __init__(self):
"""TODO: to be defined1. """
Topo.__init__(self)
pass
class EthStar(Topo):
"""Docstring for EthStar. """
def __init__(self):
"""TODO: to be defined1. """
Topo.__init__(self)
pass
class Minicps(Mininet):
"""Docstring for Minicps. """
def __init__(self):
"""TODO: to be defined1. """
Mininet.__init__(self)
pass
|
"""
Recreate the SWaT network with the highest level of precision.
DMZ AP, L3, L2 L1 wireless star networks and L0 wireless DLR
cannot be simulated because miniet lacks wireless (IEEE 802.11)
simulation support.
"""
from mininet.net import Mininet
from mininet.topo import Topo
# from minicps import constants as c
class DLR(Topo):
"""Device Level Ring Topology."""
def __init__(self):
"""TODO: to be defined1. """
Topo.__init__(self)
pass
class EthStar(Topo):
"""Docstring for EthStar. """
def __init__(self):
"""TODO: to be defined1. """
Topo.__init__(self)
pass
class L3EthStar(Topo):
"""
Connects Historian, Workstation and process PLCs
using a 5-port ethernet switch.
An industrial firewall service router filter the traffic.
"""
def __init__(self):
"""TODO: to be defined1. """
Topo.__init__(self)
class L2EthStar(Topo):
"""
Connects HMI and process PLCs
using a 5-ports ethernet switches and
16-ports ethernet switches.
"""
def __init__(self):
"""TODO: to be defined1. """
Topo.__init__(self)
class L1EthStar(Topo):
"""
Connects process PLCs
using a 5-ports ethernet switches and
16-ports ethernet switches.
"""
def __init__(self):
"""TODO: to be defined1. """
Topo.__init__(self)
class L0DLR(DLR):
"""
One for each sub-process (6 in total)
It connects redundant PLCs, sensors and actuators
using a remote IO adaptor.
"""
def __init__(self):
"""TODO: to be defined1. """
Topo.__init__(self)
class Minicps(Mininet):
"""Docstring for Minicps. """
def __init__(self):
"""TODO: to be defined1. """
Mininet.__init__(self)
pass
|
Add swat network layers class.
|
Add swat network layers class.
|
Python
|
mit
|
scy-phy/minicps,scy-phy/minicps,remmihsorp/minicps,remmihsorp/minicps
|
---
+++
@@ -1,15 +1,19 @@
"""
Recreate the SWaT network with the highest level of precision.
+
+DMZ AP, L3, L2 L1 wireless star networks and L0 wireless DLR
+cannot be simulated because miniet lacks wireless (IEEE 802.11)
+simulation support.
"""
from mininet.net import Mininet
from mininet.topo import Topo
-from minicps import constants as c
+# from minicps import constants as c
-class EthRing(Topo):
+class DLR(Topo):
- """Docstring for EthRing. """
+ """Device Level Ring Topology."""
def __init__(self):
"""TODO: to be defined1. """
@@ -29,6 +33,58 @@
pass
+class L3EthStar(Topo):
+
+ """
+ Connects Historian, Workstation and process PLCs
+ using a 5-port ethernet switch.
+ An industrial firewall service router filter the traffic.
+ """
+
+ def __init__(self):
+ """TODO: to be defined1. """
+ Topo.__init__(self)
+
+
+class L2EthStar(Topo):
+
+ """
+ Connects HMI and process PLCs
+ using a 5-ports ethernet switches and
+ 16-ports ethernet switches.
+ """
+
+ def __init__(self):
+ """TODO: to be defined1. """
+ Topo.__init__(self)
+
+
+class L1EthStar(Topo):
+
+ """
+ Connects process PLCs
+ using a 5-ports ethernet switches and
+ 16-ports ethernet switches.
+ """
+
+ def __init__(self):
+ """TODO: to be defined1. """
+ Topo.__init__(self)
+
+
+class L0DLR(DLR):
+
+ """
+ One for each sub-process (6 in total)
+ It connects redundant PLCs, sensors and actuators
+ using a remote IO adaptor.
+ """
+
+ def __init__(self):
+ """TODO: to be defined1. """
+ Topo.__init__(self)
+
+
class Minicps(Mininet):
"""Docstring for Minicps. """
|
cf6e7bdbd9ec6968be8767f1bbcab09de6f8aace
|
codekitlang/command.py
|
codekitlang/command.py
|
# -*- coding: utf-8 -*-
import argparse
from . import compiler
def main():
parser = argparse.ArgumentParser(description='CodeKit Language Compiler.')
parser.add_argument('src', nargs=1, metavar='SOURCE')
parser.add_argument('dest', nargs=1, metavar='DEST')
parser.add_argument('--framework-paths', '-f', action='append',
metavar='DIR')
namespace = parser.parse_args()
compiler_ = compiler.Compiler(framework_paths=namespace.framework_paths)
compiler_.generate_to_file(namespace.dest[0], namespace.src[0])
|
# -*- coding: utf-8 -*-
import argparse
from . import compiler
def main():
parser = argparse.ArgumentParser(description='CodeKit Language Compiler.')
parser.add_argument('src', nargs=1, metavar='SOURCE')
parser.add_argument('dest', nargs=1, metavar='DEST')
parser.add_argument('--framework-paths', '-f', action='append',
metavar='DIR')
namespace = parser.parse_args()
compiler_ = compiler.Compiler(framework_paths=namespace.framework_paths)
compiler_.generate_to_file(namespace.dest[0], namespace.src[0])
if __name__ == '__main__': # pragma:nocover
main()
|
Add interface to interpreter's "-m" option.
|
Add interface to interpreter's "-m" option.
|
Python
|
bsd-3-clause
|
gjo/python-codekitlang,gjo/python-codekitlang
|
---
+++
@@ -13,3 +13,6 @@
namespace = parser.parse_args()
compiler_ = compiler.Compiler(framework_paths=namespace.framework_paths)
compiler_.generate_to_file(namespace.dest[0], namespace.src[0])
+
+if __name__ == '__main__': # pragma:nocover
+ main()
|
a2f338d06f097d3446627dcbc9b0d727951fcf56
|
tilezilla/multiprocess.py
|
tilezilla/multiprocess.py
|
""" Multiprocess helpers
"""
class SerialExecutor(object):
""" Make regular old 'map' look like :mod:`futures.concurrent`
"""
map = map
def get_executor(executor, njob):
""" Return an instance of a execution mapper
Args:
executor (str): Name of execution method to return
njob (int): Number of jobs to use in execution
Returns:
cls: Instance of a pool executor
"""
if executor.lower() == 'process':
from concurrent.futures import ProcessPoolExecutor
return ProcessPoolExecutor(njob)
else:
return SerialExecutor()
MULTIPROC_METHODS = [
'serial',
'process',
# TODO: ipyparallel for distributed across network
]
|
""" Multiprocess helpers
"""
# LOGGING FOR MULTIPROCESSING
MULTIPROC_LOG_FORMAT = '%(asctime)s:%(hostname)s:%(process)d:%(levelname)s:%(message)s' # noqa
MULTIPROC_LOG_DATE_FORMAT = '%H:%M:%S'
def get_logger_multiproc(name=None, filename='', stream='stdout'):
""" Return a logger configured/styled for multi-processing
Args:
name (str): Name of logger to retrieve/configure
filename (str): Log to this filename using :class:`logging.FileHandler`
stream (str): Name of stream to use with logger. If `stream` is
specified with `filename`, then the `stream` argument is ignored.
Returns:
logging.LoggerAdapter: A configured logger
"""
import logging
import socket
import click
class ClickFileHandler(logging.FileHandler):
def emit(self, record):
try:
msg = self.format(record)
err = record.levelno > 20
click.echo(msg, file=self.stream, err=err)
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
logger = logging.getLogger(name)
formatter = logging.Formatter(MULTIPROC_LOG_FORMAT,
MULTIPROC_LOG_DATE_FORMAT)
if filename:
handler = ClickFileHandler(filename, 'w')
else:
stream = click.get_text_stream(stream)
handler = logging.StreamHandler(stream)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
extra = {'hostname': socket.gethostname()}
logger_adapter = logging.LoggerAdapter(logger, extra)
return logger_adapter
# MULTIPROCESSING
class SerialExecutor(object):
""" Make regular old 'map' look like :mod:`futures.concurrent`
"""
map = map
def get_executor(executor, njob):
""" Return an instance of a execution mapper
Args:
executor (str): Name of execution method to return
njob (int): Number of jobs to use in execution
Returns:
cls: Instance of a pool executor
"""
if executor.lower() == 'process':
from concurrent.futures import ProcessPoolExecutor
return ProcessPoolExecutor(njob)
else:
return SerialExecutor()
MULTIPROC_METHODS = [
'serial',
'process',
# TODO: ipyparallel for distributed across network
]
|
Add func to return logger configured for multiproc
|
Add func to return logger configured for multiproc
|
Python
|
bsd-3-clause
|
ceholden/tilezilla,ceholden/landsat_tile,ceholden/landsat_tile,ceholden/landsat_tiles,ceholden/landsat_tiles
|
---
+++
@@ -1,7 +1,59 @@
""" Multiprocess helpers
"""
+# LOGGING FOR MULTIPROCESSING
+MULTIPROC_LOG_FORMAT = '%(asctime)s:%(hostname)s:%(process)d:%(levelname)s:%(message)s' # noqa
+MULTIPROC_LOG_DATE_FORMAT = '%H:%M:%S'
+def get_logger_multiproc(name=None, filename='', stream='stdout'):
+ """ Return a logger configured/styled for multi-processing
+
+ Args:
+ name (str): Name of logger to retrieve/configure
+ filename (str): Log to this filename using :class:`logging.FileHandler`
+ stream (str): Name of stream to use with logger. If `stream` is
+ specified with `filename`, then the `stream` argument is ignored.
+
+ Returns:
+ logging.LoggerAdapter: A configured logger
+ """
+ import logging
+ import socket
+ import click
+
+ class ClickFileHandler(logging.FileHandler):
+ def emit(self, record):
+ try:
+ msg = self.format(record)
+ err = record.levelno > 20
+ click.echo(msg, file=self.stream, err=err)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ self.handleError(record)
+
+
+ logger = logging.getLogger(name)
+ formatter = logging.Formatter(MULTIPROC_LOG_FORMAT,
+ MULTIPROC_LOG_DATE_FORMAT)
+
+ if filename:
+ handler = ClickFileHandler(filename, 'w')
+ else:
+ stream = click.get_text_stream(stream)
+ handler = logging.StreamHandler(stream)
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
+
+ logger.setLevel(logging.DEBUG)
+
+ extra = {'hostname': socket.gethostname()}
+ logger_adapter = logging.LoggerAdapter(logger, extra)
+
+ return logger_adapter
+
+
+# MULTIPROCESSING
class SerialExecutor(object):
""" Make regular old 'map' look like :mod:`futures.concurrent`
"""
|
f0af944db962bdb8ea764737860ce9168f779977
|
perfkitbenchmarker/linux_packages/azure_credentials.py
|
perfkitbenchmarker/linux_packages/azure_credentials.py
|
# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package for installing the Azure credentials."""
import os
from perfkitbenchmarker import object_storage_service
AZURE_CREDENTIAL_LOCATION = '.azure'
AZURE_CREDENTIAL_TOKENS_FILE = os.path.join(
AZURE_CREDENTIAL_LOCATION, 'accessTokens.json')
AZURE_CREDENTIAL_PROFILE_FILE = os.path.join(
AZURE_CREDENTIAL_LOCATION, 'azureProfile.json')
def Install(vm):
"""Copies Azure credentials to the VM."""
vm.PushFile(
object_storage_service.FindCredentialFile(
os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)),
AZURE_CREDENTIAL_LOCATION)
vm.PushFile(
object_storage_service.FindCredentialFile(
os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)),
AZURE_CREDENTIAL_LOCATION)
|
# Copyright 2016 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package for installing the Azure credentials."""
import os
from perfkitbenchmarker import object_storage_service
AZURE_CREDENTIAL_LOCATION = '.azure'
AZURE_CREDENTIAL_TOKENS_FILE = os.path.join(
AZURE_CREDENTIAL_LOCATION, 'accessTokens.json')
AZURE_CREDENTIAL_PROFILE_FILE = os.path.join(
AZURE_CREDENTIAL_LOCATION, 'azureProfile.json')
def Install(vm):
"""Copies Azure credentials to the VM."""
vm.RemoteCommand('mkdir -p {0}'.format(AZURE_CREDENTIAL_LOCATION))
vm.PushFile(
object_storage_service.FindCredentialFile(
os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)),
AZURE_CREDENTIAL_TOKENS_FILE)
vm.PushFile(
object_storage_service.FindCredentialFile(
os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)),
AZURE_CREDENTIAL_PROFILE_FILE)
|
Fix a bug in the Azure credentials package in which they would overwrite the directory.
|
Fix a bug in the Azure credentials package in which they would overwrite the directory.
-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=248750675
|
Python
|
apache-2.0
|
GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker
|
---
+++
@@ -26,11 +26,12 @@
def Install(vm):
"""Copies Azure credentials to the VM."""
+ vm.RemoteCommand('mkdir -p {0}'.format(AZURE_CREDENTIAL_LOCATION))
vm.PushFile(
object_storage_service.FindCredentialFile(
os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)),
- AZURE_CREDENTIAL_LOCATION)
+ AZURE_CREDENTIAL_TOKENS_FILE)
vm.PushFile(
object_storage_service.FindCredentialFile(
os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)),
- AZURE_CREDENTIAL_LOCATION)
+ AZURE_CREDENTIAL_PROFILE_FILE)
|
6250427143245676a5efd7e5c55054b5b3a285fd
|
src/pushover_complete/__init__.py
|
src/pushover_complete/__init__.py
|
"""A Python 3 package for interacting with *all* aspects of the Pushover API"""
from .error import PushoverCompleteError, BadAPIRequestError
from .pushover_api import PushoverAPI
__all__ = [
'PushoverCompleteError', 'BadAPIRequestError',
'PushoverAPI'
]
__version__ = '0.0.1'
__title__ = 'pushover_complete'
__description__ = ''
__url__ = ''
__author__ = 'Scott Colby'
__email__ = 'scolby33@gmail.com'
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2016 Scott Colby'
|
"""A Python 3 package for interacting with *all* aspects of the Pushover API"""
from .error import PushoverCompleteError, BadAPIRequestError
from .pushover_api import PushoverAPI
__all__ = [
'PushoverCompleteError', 'BadAPIRequestError',
'PushoverAPI'
]
__version__ = '0.0.1'
__title__ = 'pushover_complete'
__description__ = ''
__url__ = 'https://github.com/scolby33/pushover_complete'
__author__ = 'Scott Colby'
__email__ = 'scolby33@gmail.com'
__license__ = 'MIT License'
__copyright__ = 'Copyright (c) 2016 Scott Colby'
|
Add URL for project to the project metadata
|
Add URL for project to the project metadata
|
Python
|
mit
|
scolby33/pushover_complete
|
---
+++
@@ -12,7 +12,7 @@
__title__ = 'pushover_complete'
__description__ = ''
-__url__ = ''
+__url__ = 'https://github.com/scolby33/pushover_complete'
__author__ = 'Scott Colby'
|
8ff455e9eb0f7cbf80f1ac631a5aa7e98f8935af
|
mysite/project/controllers.py
|
mysite/project/controllers.py
|
import mysite.search.models
import logging
KEY='answer_ids_that_are_ours'
def similar_project_names(project_name):
# HOPE: One day, order this by relevance.
return mysite.search.models.Project.objects.filter(
name__icontains=project_name)
def note_in_session_we_control_answer_id(session, answer_id, KEY=KEY):
if KEY not in session:
session[KEY] = []
session[KEY].append(answer_id)
def get_unsaved_answers_from_session(session):
ret = []
for answer_id in session.get(KEY, []):
try:
ret.append(mysite.search.models.Answer.all_even_unowned.get(id=answer_id))
except mysite.search.models.Answer.DoesNotExist:
logging.warn("Whoa, the answer has gone away. Session and Answer IDs: " +
str(session) + str(answer_id))
return ret
def take_control_of_our_answers(user, session, KEY=KEY):
# FIXME: This really ought to be some sort of thread-safe queue,
# or stored in the database, or something.
answers = get_unsaved_answers_from_session(session)
# It's unsafe to remove this KEY from the session, in case of concurrent access.
# But we do anyway. God help us.
if KEY in session:
del session[KEY]
|
import mysite.search.models
import logging
KEY='answer_ids_that_are_ours'
def similar_project_names(project_name):
# HOPE: One day, order this by relevance.
return mysite.search.models.Project.objects.filter(
name__icontains=project_name)
def note_in_session_we_control_answer_id(session, answer_id, KEY=KEY):
if KEY not in session:
session[KEY] = []
session[KEY].append(answer_id)
def get_unsaved_answers_from_session(session):
ret = []
for answer_id in session.get(KEY, []):
try:
ret.append(mysite.search.models.Answer.all_even_unowned.get(id=answer_id))
except mysite.search.models.Answer.DoesNotExist:
logging.warn("Whoa, the answer has gone away. Session and Answer IDs: " +
str(session) + str(answer_id))
return ret
def take_control_of_our_answers(user, session, KEY=KEY):
# FIXME: This really ought to be some sort of thread-safe queue,
# or stored in the database, or something.
for answer_id in session.get(KEY, []):
answer = mysite.search.models.Answer.all_even_unowned.get(pk=answer_id)
answer.author = user
answer.save()
# It's unsafe to remove this KEY from the session, in case of concurrent access.
# But we do anyway. God help us.
if KEY in session:
del session[KEY]
|
Revert "Do not assume all answers in our session are valid."
|
Revert "Do not assume all answers in our session are valid."
This reverts commit 0f7e535d645086f9c29e77e28588a3165f7bb2b8.
|
Python
|
agpl-3.0
|
moijes12/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,moijes12/oh-mainline,mzdaniel/oh-mainline,heeraj123/oh-mainline,eeshangarg/oh-mainline,SnappleCap/oh-mainline,campbe13/openhatch,ojengwa/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,ehashman/oh-mainline,willingc/oh-mainline,ojengwa/oh-mainline,campbe13/openhatch,jledbetter/openhatch,ehashman/oh-mainline,vipul-sharma20/oh-mainline,ojengwa/oh-mainline,heeraj123/oh-mainline,jledbetter/openhatch,waseem18/oh-mainline,Changaco/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,SnappleCap/oh-mainline,eeshangarg/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,nirmeshk/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,mzdaniel/oh-mainline,SnappleCap/oh-mainline,nirmeshk/oh-mainline,eeshangarg/oh-mainline,SnappleCap/oh-mainline,eeshangarg/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,moijes12/oh-mainline,heeraj123/oh-mainline,sudheesh001/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,SnappleCap/oh-mainline,jledbetter/openhatch,openhatch/oh-mainline,willingc/oh-mainline,openhatch/oh-mainline,ojengwa/oh-mainline,waseem18/oh-mainline,waseem18/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,waseem18/oh-mainline,vipul-sharma20/oh-mainline,willingc/oh-mainline,sudheesh001/oh-mainline,campbe13/openhatch,sudheesh001/oh-mainline,moijes12/oh-mainline,Changaco/oh-mainline,openhatch/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,jledbetter/openhatch,campbe13/openhatch,vipul-sharma20/oh-mainline,campbe13/openhatch,ehashman/oh-mainline,openhatch/oh-mainline,jledbetter/openhatch,willingc/oh-mainline,vipul-sharma20/oh-mainline,waseem18/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,sudheesh001/oh-mainline,openhatch/oh-mainline,heeraj123/oh-mainline
|
---
+++
@@ -25,7 +25,10 @@
def take_control_of_our_answers(user, session, KEY=KEY):
# FIXME: This really ought to be some sort of thread-safe queue,
# or stored in the database, or something.
- answers = get_unsaved_answers_from_session(session)
+ for answer_id in session.get(KEY, []):
+ answer = mysite.search.models.Answer.all_even_unowned.get(pk=answer_id)
+ answer.author = user
+ answer.save()
# It's unsafe to remove this KEY from the session, in case of concurrent access.
# But we do anyway. God help us.
if KEY in session:
|
546140bee689fc63361977dafa600022396606e7
|
audio_train.py
|
audio_train.py
|
#%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 256
DEPTH = 7
STACKS = 4
LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
BINS = 256
LOAD = False
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=8,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')])
|
#%% Setup.
import numpy as np
import scipy.io.wavfile
from keras.utils.visualize_util import plot
from keras.callbacks import TensorBoard, ModelCheckpoint
from keras.utils import np_utils
from eva.models.wavenet import Wavenet, compute_receptive_field
from eva.util.mutil import sparse_labels
#%% Data
RATE, DATA = scipy.io.wavfile.read('./data/undertale/undertale_001_once_upon_a_time.comp.wav')
#%% Model Config.
MODEL = Wavenet
FILTERS = 256
DEPTH = 7
STACKS = 4
LENGTH = DATA.shape[0]
BINS = 256
LOAD = False
#%% Train Config.
BATCH_SIZE = 5
EPOCHS = 2000
#%% Model.
INPUT = (LENGTH, BINS)
ARGS = (INPUT, FILTERS, DEPTH, STACKS)
M = MODEL(*ARGS)
if LOAD:
M.load_weights('model.h5')
M.summary()
plot(M)
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
TRAIN = TRAIN.reshape(BATCH_SIZE, TRAIN.shape[0]//BATCH_SIZE, TRAIN.shape[1])
M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=EPOCHS, batch_size=BATCH_SIZE,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')])
|
Add train config to audio train
|
Add train config to audio train
|
Python
|
apache-2.0
|
israelg99/eva
|
---
+++
@@ -18,10 +18,14 @@
FILTERS = 256
DEPTH = 7
STACKS = 4
-LENGTH = 1 + compute_receptive_field(RATE, DEPTH, STACKS)[0]
+LENGTH = DATA.shape[0]
BINS = 256
LOAD = False
+
+#%% Train Config.
+BATCH_SIZE = 5
+EPOCHS = 2000
#%% Model.
INPUT = (LENGTH, BINS)
@@ -37,7 +41,7 @@
#%% Train.
TRAIN = np_utils.to_categorical(DATA, BINS)
-TRAIN = TRAIN[:TRAIN.shape[0]//LENGTH*LENGTH].reshape(TRAIN.shape[0]//LENGTH, LENGTH, BINS)
+TRAIN = TRAIN.reshape(BATCH_SIZE, TRAIN.shape[0]//BATCH_SIZE, TRAIN.shape[1])
-M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=2000, batch_size=8,
+M.fit(TRAIN, sparse_labels(TRAIN), nb_epoch=EPOCHS, batch_size=BATCH_SIZE,
callbacks=[TensorBoard(), ModelCheckpoint('model.h5')])
|
b7077a85956cae3efb2ec1b2f474735cf6e8c4ed
|
csv_converter.py
|
csv_converter.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
self.debug = False
def set_debug(self, debug):
self.debug = debug
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
if self.debug:
print row
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_csv(self, file_object):
reader = csv.DictReader(file_object)
for row in reader:
self.addRow(self.convertRow(row))
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
self.read_csv(csvfile)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import warnings
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
self.debug = False
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_csv(self, file_object):
reader = csv.DictReader(file_object)
for row in reader:
try:
self.addRow(self.convertRow(row))
except ValueError as e:
warnings.warn("Row parsing: {} Warning: {}".format(row, e.strerror), UserWarning)
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
self.read_csv(csvfile)
|
Remove debug and use warngins
|
Remove debug and use warngins
|
Python
|
mit
|
stormaaja/csvconverter,stormaaja/csvconverter,stormaaja/csvconverter
|
---
+++
@@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
import csv
+import warnings
class CsvConverter:
@@ -11,9 +12,6 @@
self.source_product_code = "product_code"
self.source_quantity = "quantity"
self.debug = False
-
- def set_debug(self, debug):
- self.debug = debug
def clear(self):
self.rows = []
@@ -31,8 +29,6 @@
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
- if self.debug:
- print row
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
@@ -41,7 +37,10 @@
def read_csv(self, file_object):
reader = csv.DictReader(file_object)
for row in reader:
- self.addRow(self.convertRow(row))
+ try:
+ self.addRow(self.convertRow(row))
+ except ValueError as e:
+ warnings.warn("Row parsing: {} Warning: {}".format(row, e.strerror), UserWarning)
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
|
fd5674a1b36498e5d3a597203c180ded8c57d058
|
morph_proxy.py
|
morph_proxy.py
|
# Run this with mitmdump -q -s morph_proxy.py
def request(context, flow):
# print out all the basic information to determine what request is being made
# coming from which container
# print flow.request.method
# print flow.request.host
# print flow.request.path
# print flow.request.scheme
# print flow.request.client_conn.address[0]
# print "***"
#text = flow.request.method + " " + flow.request.scheme + "://" + flow.request.host + flow.request.path + " FROM " + flow.request.client_conn
text = flow.request.method + " " + flow.request.scheme + "://" + flow.request.host + flow.request.path + " FROM " + flow.request.client_conn.address[0]
print text
# print "***"
|
# Run this with mitmdump -q -s morph_proxy.py
def response(context, flow):
text = flow.request.method + " " + flow.request.scheme + "://" + flow.request.host + flow.request.path + " FROM " + flow.request.client_conn.address[0] + " REQUEST SIZE " + str(len(flow.request.content)) + " RESPONSE SIZE " + str(len(flow.response.content))
print text
|
Add request and response size
|
Add request and response size
|
Python
|
agpl-3.0
|
otherchirps/morph,openaustralia/morph,otherchirps/morph,otherchirps/morph,OpenAddressesUK/morph,otherchirps/morph,otherchirps/morph,openaustralia/morph,openaustralia/morph,otherchirps/morph,otherchirps/morph,OpenAddressesUK/morph,openaustralia/morph,OpenAddressesUK/morph,openaustralia/morph,openaustralia/morph,OpenAddressesUK/morph,openaustralia/morph
|
---
+++
@@ -1,15 +1,5 @@
# Run this with mitmdump -q -s morph_proxy.py
-def request(context, flow):
- # print out all the basic information to determine what request is being made
- # coming from which container
- # print flow.request.method
- # print flow.request.host
- # print flow.request.path
- # print flow.request.scheme
- # print flow.request.client_conn.address[0]
- # print "***"
- #text = flow.request.method + " " + flow.request.scheme + "://" + flow.request.host + flow.request.path + " FROM " + flow.request.client_conn
- text = flow.request.method + " " + flow.request.scheme + "://" + flow.request.host + flow.request.path + " FROM " + flow.request.client_conn.address[0]
+def response(context, flow):
+ text = flow.request.method + " " + flow.request.scheme + "://" + flow.request.host + flow.request.path + " FROM " + flow.request.client_conn.address[0] + " REQUEST SIZE " + str(len(flow.request.content)) + " RESPONSE SIZE " + str(len(flow.response.content))
print text
- # print "***"
|
c8cac2a2c1b42fde675b166272729c74c2c42cdc
|
x.py
|
x.py
|
#!/usr/bin/env python
# This file is only a "symlink" to bootstrap.py, all logic should go there.
import os
import sys
rust_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))
import bootstrap
bootstrap.main()
|
#!/usr/bin/env python
# This file is only a "symlink" to bootstrap.py, all logic should go there.
import os
import sys
# If this is python2, check if python3 is available and re-execute with that
# interpreter.
if sys.version_info.major < 3:
try:
# On Windows, `py -3` sometimes works.
# Try this first, because 'python3' sometimes tries to launch the app
# store on Windows
os.execvp("py", ["py", "-3"] + sys.argv)
except OSError:
try:
os.execvp("python3", ["python3"] + sys.argv)
except OSError:
# Python 3 isn't available, fall back to python 2
pass
rust_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))
import bootstrap
bootstrap.main()
|
Choose the version of python at runtime (portable version)
|
Choose the version of python at runtime (portable version)
- Try `py -3` first for windows compatibility
- Fall back to `python3` if `py` doesn't work
|
Python
|
apache-2.0
|
aidancully/rust,graydon/rust,graydon/rust,graydon/rust,graydon/rust,aidancully/rust,aidancully/rust,aidancully/rust,aidancully/rust,graydon/rust,graydon/rust,aidancully/rust
|
---
+++
@@ -4,6 +4,22 @@
import os
import sys
+
+# If this is python2, check if python3 is available and re-execute with that
+# interpreter.
+if sys.version_info.major < 3:
+ try:
+ # On Windows, `py -3` sometimes works.
+ # Try this first, because 'python3' sometimes tries to launch the app
+ # store on Windows
+ os.execvp("py", ["py", "-3"] + sys.argv)
+ except OSError:
+ try:
+ os.execvp("python3", ["python3"] + sys.argv)
+ except OSError:
+ # Python 3 isn't available, fall back to python 2
+ pass
+
rust_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))
|
f707daae978f6fb7c8b5cc8f66cac2820097ebde
|
attack.py
|
attack.py
|
from subprocess import Popen, PIPE
proc = Popen(["./badencrypt.py", "hello!!!"],stdout=PIPE)
hexCiphertext = proc.communicate()[0].strip()
#import pdb
#pdb.set_trace()
print hexCiphertext
proc = Popen(["./baddecrypt.py", hexCiphertext],stdout=PIPE)
output = proc.communicate()[0]
print output
|
from subprocess import Popen, PIPE
proc = Popen(["./badencrypt.py", "hellooooworlddd"],stdout=PIPE)
hexCiphertext = proc.communicate()[0].strip()
import pdb
pdb.set_trace()
print hexCiphertext
print str(len(hexCiphertext)/16)+" blocks"
proc = Popen(["./baddecrypt.py", hexCiphertext],stdout=PIPE)
output = proc.communicate()[0]
print output
|
Make a sixteen byte plaintext
|
Make a sixteen byte plaintext
|
Python
|
mit
|
somethingnew2-0/CS642-HW4,somethingnew2-0/CS642-HW4
|
---
+++
@@ -1,11 +1,12 @@
from subprocess import Popen, PIPE
-proc = Popen(["./badencrypt.py", "hello!!!"],stdout=PIPE)
+proc = Popen(["./badencrypt.py", "hellooooworlddd"],stdout=PIPE)
hexCiphertext = proc.communicate()[0].strip()
-#import pdb
-#pdb.set_trace()
+import pdb
+pdb.set_trace()
print hexCiphertext
+print str(len(hexCiphertext)/16)+" blocks"
proc = Popen(["./baddecrypt.py", hexCiphertext],stdout=PIPE)
output = proc.communicate()[0]
|
d37237779e6e2a7ed1df9a9ea83e93df2cf2b478
|
elifetools/tests/fixtures/test_pub_history/content_01_expected.py
|
elifetools/tests/fixtures/test_pub_history/content_01_expected.py
|
from collections import OrderedDict
from elifetools.utils import date_struct
expected = [
OrderedDict([
('event_type', 'preprint-publication'),
('event_desc', 'This article was originally published as a <ext-link ext-link-type="uri" xlink:href="https://www.biorxiv.org/content/early/2017/03/24/118356">preprint on Biorxiv</ext-link>'),
('event_desc_html', 'This article was originally published as a <a href="https://www.biorxiv.org/content/early/2017/03/24/118356">preprint on Biorxiv</a>'),
('uri', 'https://www.biorxiv.org/content/early/2017/03/24/118356'),
('uri_text', 'preprint on Biorxiv'),
('day', '24'),
('month', '03'),
('year', '2017'),
(u'date', date_struct(2017, 3, 24)),
('iso-8601-date', '2017-03-24')
])
]
|
from collections import OrderedDict
from elifetools.utils import date_struct
expected = [
OrderedDict([
('event_type', 'preprint-publication'),
('event_desc', 'This article was originally published as a <ext-link ext-link-type="uri" xlink:href="https://www.biorxiv.org/content/early/2017/03/24/118356">preprint on Biorxiv</ext-link>'),
('event_desc_html', 'This article was originally published as a <a href="https://www.biorxiv.org/content/early/2017/03/24/118356">preprint on Biorxiv</a>'),
('uri', 'https://www.biorxiv.org/content/early/2017/03/24/118356'),
('uri_text', 'preprint on Biorxiv'),
('day', '24'),
('month', '03'),
('year', '2017'),
('date', date_struct(2017, 3, 24)),
('iso-8601-date', '2017-03-24')
])
]
|
Change the date key to non-unicode in the test fixture.
|
Change the date key to non-unicode in the test fixture.
|
Python
|
mit
|
elifesciences/elife-tools,elifesciences/elife-tools
|
---
+++
@@ -12,7 +12,7 @@
('day', '24'),
('month', '03'),
('year', '2017'),
- (u'date', date_struct(2017, 3, 24)),
+ ('date', date_struct(2017, 3, 24)),
('iso-8601-date', '2017-03-24')
])
]
|
56116d198ea9fea6d9753c412fff550876e17196
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='Yify',
version='0.1.0',
author='Batista Harahap',
author_email='batista@bango29.com',
packages=['yify'],
url='https://github.com/tistaharahap/yify-python',
license='LICENSE',
description='Yify provides a Python interface to interact with Yify Torrent\'s API.',
long_description=open('README').read()
)
|
from distutils.core import setup
setup(
name='Yify',
version='0.1.0',
author='Batista Harahap',
author_email='batista@bango29.com',
packages=['yify'],
url='https://github.com/tistaharahap/yify-python',
license='LICENSE',
description='Yify provides a Python interface to interact with Yify Torrent\'s API.',
long_description=open('README.md').read()
)
|
Update to read README.md for the long description
|
Update to read README.md for the long description
|
Python
|
mit
|
tistaharahap/yify-python
|
---
+++
@@ -9,5 +9,5 @@
url='https://github.com/tistaharahap/yify-python',
license='LICENSE',
description='Yify provides a Python interface to interact with Yify Torrent\'s API.',
- long_description=open('README').read()
+ long_description=open('README.md').read()
)
|
6ab12eb503bbe2eddc1eb419398082dffeb335d5
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
VERSION = (1, 4, 13)
# Dynamically calculate the version based on VERSION tuple
if len(VERSION)>2 and VERSION[2] is not None:
str_version = "%d.%d_%s" % VERSION[:3]
else:
str_version = "%d.%d" % VERSION[:2]
version= str_version
setup(
name = 'django-livesettings',
version = version,
description = "livesettings",
long_description = """Django-Livesettings is a project split from the Satchmo Project. It provides the ability to configure settings via an admin interface, rather than by editing "settings.py".""",
author = 'Bruce Kroeze',
author_email = 'bruce@ecomsmith.com',
url = 'http://bitbucket.org/bkroeze/django-livesettings/',
license = 'New BSD License',
platforms = ['any'],
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django'],
packages = find_packages(),
setup_requires=["setuptools_hg"],
include_package_data = True,
)
|
from setuptools import setup, find_packages
VERSION = (1, 6, 0)
# Dynamically calculate the version based on VERSION tuple
if len(VERSION)>2 and VERSION[2] is not None:
str_version = "%d.%d_%s" % VERSION[:3]
else:
str_version = "%d.%d" % VERSION[:2]
version= str_version
setup(
name = 'django-livesettings',
version = version,
description = "livesettings",
long_description = """Django-Livesettings is a project split from the Satchmo Project. It provides the ability to configure settings via an admin interface, rather than by editing "settings.py".""",
author = 'Bruce Kroeze',
author_email = 'bruce@ecomsmith.com',
url = 'http://bitbucket.org/bkroeze/django-livesettings/',
license = 'New BSD License',
platforms = ('any',),
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django'],
packages = find_packages(),
install_requires = ('django-keyedcache>=1.6-0',),
dependency_links = (
'https://github.com/pjrobertson/dj-keyedcache/archive/master.zip#egg=django-keyedcache-1.6-0',
),
setup_requires=('setuptools_hg',),
include_package_data = True,
)
|
Add django-keyedcache as a requirement
|
Add django-keyedcache as a requirement
|
Python
|
bsd-3-clause
|
pjrobertson/dj-livesettings
|
---
+++
@@ -1,6 +1,6 @@
from setuptools import setup, find_packages
-VERSION = (1, 4, 13)
+VERSION = (1, 6, 0)
# Dynamically calculate the version based on VERSION tuple
if len(VERSION)>2 and VERSION[2] is not None:
@@ -19,7 +19,7 @@
author_email = 'bruce@ecomsmith.com',
url = 'http://bitbucket.org/bkroeze/django-livesettings/',
license = 'New BSD License',
- platforms = ['any'],
+ platforms = ('any',),
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
@@ -28,6 +28,10 @@
'Programming Language :: Python',
'Framework :: Django'],
packages = find_packages(),
- setup_requires=["setuptools_hg"],
+ install_requires = ('django-keyedcache>=1.6-0',),
+ dependency_links = (
+ 'https://github.com/pjrobertson/dj-keyedcache/archive/master.zip#egg=django-keyedcache-1.6-0',
+ ),
+ setup_requires=('setuptools_hg',),
include_package_data = True,
)
|
a5f1e3d6120672b24f4ec4445b7d34003a739cdd
|
setup.py
|
setup.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
PACKAGE_VERSION = '0.1'
deps = ['fxos-appgen>=0.2.10',
'marionette_client>=0.7.1.1',
'marionette_extension >= 0.4',
'mozdevice >= 0.33',
'mozlog == 1.8',
'moznetwork >= 0.24',
'mozprocess >= 0.18',
'wptserve >= 1.0.1',
'wptrunner >= 0.3.8']
setup(name='fxos-certsuite',
version=PACKAGE_VERSION,
description='Certification suite for FirefoxOS',
classifiers=[],
keywords='mozilla',
author='Mozilla Automation and Testing Team',
author_email='tools@lists.mozilla.org',
url='https://github.com/mozilla-b2g/fxos-certsuite',
license='MPL',
packages=['certsuite'],
include_package_data=True,
zip_safe=False,
install_requires=deps,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
runcertsuite = certsuite:harness_main
cert = certsuite:certcli
""")
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
PACKAGE_VERSION = '0.1'
deps = ['fxos-appgen>=0.2.10',
'marionette_client>=0.7.1.1',
'marionette_extension >= 0.4',
'mozdevice >= 0.33',
'mozlog >= 1.8',
'moznetwork >= 0.24',
'mozprocess >= 0.18',
'wptserve >= 1.0.1',
'wptrunner >= 0.3.8']
setup(name='fxos-certsuite',
version=PACKAGE_VERSION,
description='Certification suite for FirefoxOS',
classifiers=[],
keywords='mozilla',
author='Mozilla Automation and Testing Team',
author_email='tools@lists.mozilla.org',
url='https://github.com/mozilla-b2g/fxos-certsuite',
license='MPL',
packages=['certsuite'],
include_package_data=True,
zip_safe=False,
install_requires=deps,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
runcertsuite = certsuite:harness_main
cert = certsuite:certcli
""")
|
Revert "Freeze mozlog version because of proposed API changes."
|
Revert "Freeze mozlog version because of proposed API changes."
This reverts commit 932b9d0b8e432ec1a7ff175c8716c8948b0beddd.
|
Python
|
mpl-2.0
|
oouyang/fxos-certsuite,ShakoHo/fxos-certsuite,mozilla-b2g/fxos-certsuite,oouyang/fxos-certsuite,Conjuror/fxos-certsuite,cr/fxos-certsuite,cr/fxos-certsuite,askeing/fxos-certsuite,cr/fxos-certsuite,mozilla-b2g/fxos-certsuite,oouyang/fxos-certsuite,askeing/fxos-certsuite,ShakoHo/fxos-certsuite,ypwalter/fxos-certsuite,ypwalter/fxos-certsuite,mozilla-b2g/fxos-certsuite,ypwalter/fxos-certsuite,cr/fxos-certsuite,cr/fxos-certsuite,ypwalter/fxos-certsuite,cr/fxos-certsuite,Conjuror/fxos-certsuite,cr/fxos-certsuite,ypwalter/fxos-certsuite,oouyang/fxos-certsuite,mozilla-b2g/fxos-certsuite,mozilla-b2g/fxos-certsuite,mozilla-b2g/fxos-certsuite,askeing/fxos-certsuite,Conjuror/fxos-certsuite,oouyang/fxos-certsuite,askeing/fxos-certsuite,ypwalter/fxos-certsuite,ShakoHo/fxos-certsuite,Conjuror/fxos-certsuite,ShakoHo/fxos-certsuite,ShakoHo/fxos-certsuite,ypwalter/fxos-certsuite,oouyang/fxos-certsuite,askeing/fxos-certsuite,Conjuror/fxos-certsuite,ShakoHo/fxos-certsuite,askeing/fxos-certsuite,Conjuror/fxos-certsuite,askeing/fxos-certsuite,oouyang/fxos-certsuite,mozilla-b2g/fxos-certsuite,ShakoHo/fxos-certsuite,Conjuror/fxos-certsuite
|
---
+++
@@ -9,7 +9,7 @@
'marionette_client>=0.7.1.1',
'marionette_extension >= 0.4',
'mozdevice >= 0.33',
- 'mozlog == 1.8',
+ 'mozlog >= 1.8',
'moznetwork >= 0.24',
'mozprocess >= 0.18',
'wptserve >= 1.0.1',
|
a5e6c7ab40d1f8dd7f8b3c68ad461ec6adaa0ed2
|
setup.py
|
setup.py
|
from distutils.core import setup
MESOS_VERSION = '0.20.0'
UBUNTU_VERSION = '14.04'
tests_require = ['pytest>=2.5.0,<2.6.0', 'pytest-cov>=1.6,<1.7',
'pytest-xdist>=1.9,<1.10', 'unittest2>=0.5.1,<0.6.0',
'mock>=1.0.1,<1.1.0', 'flask>=0.10.1,<0.11.0']
setup(name='changes-mesos-scheduler',
scripts=['scripts/changes-mesos-scheduler'],
packages=['changes_mesos_scheduler'],
extras_require={'tests': tests_require},
dependency_links = ['http://downloads.mesosphere.io/master/ubuntu/%s/mesos-%s-py2.7-linux-x86_64.egg#egg=mesos'
% (UBUNTU_VERSION, MESOS_VERSION)],
install_requires=['statsd', 'mesos'],
package_dir={'changes_mesos_scheduler': 'changes_mesos_scheduler'})
|
from distutils.core import setup
MESOS_VERSION = '0.27.0'
UBUNTU_VERSION = '14.04'
tests_require = ['pytest>=2.5.0,<2.6.0', 'pytest-cov>=1.6,<1.7',
'pytest-xdist>=1.9,<1.10', 'unittest2>=0.5.1,<0.6.0',
'mock>=1.0.1,<1.1.0', 'flask>=0.10.1,<0.11.0']
setup(name='changes-mesos-scheduler',
scripts=['scripts/changes-mesos-scheduler'],
packages=['changes_mesos_scheduler'],
extras_require={'tests': tests_require},
dependency_links = ['http://downloads.mesosphere.io/master/ubuntu/%s/mesos-%s-py2.7-linux-x86_64.egg#egg=mesos'
% (UBUNTU_VERSION, MESOS_VERSION)],
install_requires=['statsd', 'mesos'],
package_dir={'changes_mesos_scheduler': 'changes_mesos_scheduler'})
|
Update Mesos bindings to 0.27.0
|
Update Mesos bindings to 0.27.0
Summary: Won't take effect until we update the deb that's installed.
Test Plan: staging (I've been able to install the generated deb on the staging scheduler)
Reviewers: kylec, paulruan
Reviewed By: paulruan
Subscribers: changesbot
Differential Revision: https://tails.corp.dropbox.com/D173733
|
Python
|
apache-2.0
|
dropbox/changes-mesos-framework,dropbox/changes-mesos-framework
|
---
+++
@@ -1,6 +1,6 @@
from distutils.core import setup
-MESOS_VERSION = '0.20.0'
+MESOS_VERSION = '0.27.0'
UBUNTU_VERSION = '14.04'
tests_require = ['pytest>=2.5.0,<2.6.0', 'pytest-cov>=1.6,<1.7',
|
e318716fdaeda8fdabe06daf644178a43bc7400e
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='nanomon',
version='1.0',
author='Michael Barrett',
author_email='mike@brkt.com',
description='The Nano Monitoring System',
packages=find_packages(),
)
|
from setuptools import setup, find_packages
setup(
name='nymms',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
description='Not Your Mother\'s Monitoring System (NYMMS)',
packages=find_packages(),
)
|
Change package name, add license
|
Change package name, add license
|
Python
|
bsd-2-clause
|
cloudtools/nymms
|
---
+++
@@ -1,9 +1,11 @@
from setuptools import setup, find_packages
-setup(name='nanomon',
- version='1.0',
+setup(
+ name='nymms',
+ version='0.1.0',
author='Michael Barrett',
- author_email='mike@brkt.com',
- description='The Nano Monitoring System',
+ author_email='loki77@gmail.com',
+ license="New BSD license",
+ description='Not Your Mother\'s Monitoring System (NYMMS)',
packages=find_packages(),
)
|
4ced26323a1e98b9fea823c35ebbbb28b103369a
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='zeit.brightcove',
version='2.10.2.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description='Brightcove HTTP interface',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'gocept.runner>0.5.3',
'grokcore.component',
'grokcore.view',
'lxml',
'pytz',
'setuptools',
'zeit.addcentral',
'zeit.cms>=2.104.0.dev0',
'zeit.content.video>=2.7.4.dev0',
'zeit.solr>=2.2.0.dev0',
'zope.cachedescriptors',
'zope.component',
'zope.interface',
'zope.schema',
],
extras_require=dict(test=[
'zeit.content.author',
]),
entry_points="""
[console_scripts]
update-brightcove-repository=zeit.brightcove.update:_update_from_brightcove
brightcove-import-playlists=zeit.brightcove.update2:_import_playlists
"""
)
|
from setuptools import setup, find_packages
setup(
name='zeit.brightcove',
version='2.10.2.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description='Brightcove HTTP interface',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'gocept.runner>0.5.3',
'grokcore.component',
'grokcore.view',
'lxml',
'pytz',
'setuptools',
'zeit.addcentral',
'zeit.cms>=2.104.0.dev0',
'zeit.content.video>=2.7.4.dev0',
'zeit.solr>=2.2.0.dev0',
'zope.cachedescriptors',
'zope.component',
'zope.interface',
'zope.schema',
],
extras_require=dict(test=[
'zeit.content.author',
]),
entry_points="""
[console_scripts]
update-brightcove-repository=zeit.brightcove.update:_update_from_brightcove
brightcove-import-playlists=zeit.brightcove.update2:import_playlists
"""
)
|
Fix typo (belongs to commit:9ec5886)
|
ZON-4070: Fix typo (belongs to commit:9ec5886)
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.brightcove
|
---
+++
@@ -35,6 +35,6 @@
entry_points="""
[console_scripts]
update-brightcove-repository=zeit.brightcove.update:_update_from_brightcove
- brightcove-import-playlists=zeit.brightcove.update2:_import_playlists
+ brightcove-import-playlists=zeit.brightcove.update2:import_playlists
"""
)
|
77bda2ce09b8d397a25c4bff83268aa3d8ec187b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="comt",
version="2.6.0",
url='http://www.co-ment.org',
license='AGPL3',
description="Web-based Text Annotation Application.",
long_description=open('ABOUT.rst').read(),
author='Abilian SAS',
author_email='dev@abilian.com',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=['setuptools'],
zip_safe=False,
)
|
from setuptools import setup, find_packages
setup(
name="comt",
use_scm_version=True,
url='http://www.co-ment.org',
license='AGPL3',
description="Web-based Text Annotation Application.",
long_description=open('ABOUT.rst').read(),
author='Abilian SAS',
author_email='dev@abilian.com',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=['setuptools'],
zip_safe=False,
)
|
Use git tag for package version.
|
Use git tag for package version.
|
Python
|
agpl-3.0
|
co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt
|
---
+++
@@ -2,7 +2,7 @@
setup(
name="comt",
- version="2.6.0",
+ use_scm_version=True,
url='http://www.co-ment.org',
license='AGPL3',
description="Web-based Text Annotation Application.",
|
dfb133beb576cddc67bbaf17c2598b7f37c76bad
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
if __name__ == "__main__":
import falafel
setup(
name=falafel.NAME,
version=falafel.VERSION,
description="Insights Application Programming Interface",
packages=find_packages(),
package_data={"": ["*.json", "RELEASE", "COMMIT"]},
install_requires=[
'pyyaml',
],
extras_require={
'develop': [
'flake8',
'coverage',
'numpydoc',
'pytest',
'pytest-cov',
'Sphinx',
'sphinx_rtd_theme',
'Jinja2'
],
'optional': [
'python-cjson'
],
'test': [
'flake8',
'coverage',
'pytest',
'pytest-cov'
]
},
entry_points={
'console_scripts': [
'insights-run = falafel.core:main',
'gen_api = falafel.tools.generate_api_config:main',
'compare_api = falafel.tools.compare_uploader_configs:main'
]
}
)
|
from setuptools import setup, find_packages
if __name__ == "__main__":
import falafel
setup(
name=falafel.NAME,
version=falafel.VERSION,
description="Insights Application Programming Interface",
packages=find_packages(),
package_data={"": ["*.json", "RELEASE", "COMMIT", "*.md"]},
install_requires=[
'pyyaml',
],
extras_require={
'develop': [
'flake8',
'coverage',
'numpydoc',
'pytest',
'pytest-cov',
'Sphinx',
'sphinx_rtd_theme',
'Jinja2'
],
'optional': [
'python-cjson'
],
'test': [
'flake8',
'coverage',
'pytest',
'pytest-cov'
]
},
entry_points={
'console_scripts': [
'insights-run = falafel.core:main',
'gen_api = falafel.tools.generate_api_config:main',
'compare_api = falafel.tools.compare_uploader_configs:main'
]
}
)
|
Add bodytemplate.md to build artifacts
|
Add bodytemplate.md to build artifacts
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
---
+++
@@ -8,7 +8,7 @@
version=falafel.VERSION,
description="Insights Application Programming Interface",
packages=find_packages(),
- package_data={"": ["*.json", "RELEASE", "COMMIT"]},
+ package_data={"": ["*.json", "RELEASE", "COMMIT", "*.md"]},
install_requires=[
'pyyaml',
],
|
5089c8a348f44a4741ab94be78f81ce0fbf34c65
|
setup.py
|
setup.py
|
import setuptools
import versioneer
if __name__ == "__main__":
setuptools.setup(
name='basis_set_exchange',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='The Quantum Chemistry Basis Set Exchange',
author='The Molecular Sciences Software Institute',
author_email='bpp4@vt.edu',
url="https://github.com/MolSSI/basis_set_exchange",
license='BSD-3C',
packages=setuptools.find_packages(),
install_requires=[
'jsonschema',
],
extras_require={
'docs': [
'sphinx==1.2.3', # autodoc was broken in 1.3.1
'sphinxcontrib-napoleon',
'sphinx_rtd_theme',
'numpydoc',
],
'tests': [
'pytest',
'pytest-cov'
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 3',
],
zip_safe=True,
)
|
import setuptools
import versioneer
if __name__ == "__main__":
setuptools.setup(
name='basis_set_exchange',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='The Quantum Chemistry Basis Set Exchange',
author='The Molecular Sciences Software Institute',
author_email='bpp4@vt.edu',
url="https://github.com/MolSSI/basis_set_exchange",
license='BSD-3C',
packages=setuptools.find_packages(),
install_requires=[
'jsonschema',
],
extras_require={
'docs': [
'sphinx',
'sphinxcontrib-napoleon',
'sphinx_rtd_theme',
'numpydoc',
],
'tests': [
'pytest',
'pytest-cov'
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 3',
],
zip_safe=True,
)
|
Remove version limit on sphinx
|
Remove version limit on sphinx
|
Python
|
bsd-3-clause
|
MOLSSI-BSE/basis_set_exchange
|
---
+++
@@ -17,7 +17,7 @@
],
extras_require={
'docs': [
- 'sphinx==1.2.3', # autodoc was broken in 1.3.1
+ 'sphinx',
'sphinxcontrib-napoleon',
'sphinx_rtd_theme',
'numpydoc',
|
9b01b00bf5fa60efaa402bf1e40154574942558e
|
setup.py
|
setup.py
|
from setuptools import setup
import mzgtfs
setup(
name='mzgtfs',
version=mzgtfs.__version__,
description='Mapzen GTFS',
author='Ian Rees',
author_email='ian@mapzen.com',
url='https://github.com/transitland/mapzen-gtfs',
license='License :: OSI Approved :: MIT License',
packages=['mzgtfs'],
install_requires=['unicodecsv', 'mzgeohash'],
zip_safe=False,
# Include examples.
package_data = {
'': ['*.txt', '*.md', '*.zip']
}
)
|
from setuptools import setup
import mzgtfs
setup(
name='mzgtfs',
version=mzgtfs.__version__,
description='Mapzen GTFS',
author='Ian Rees',
author_email='ian@mapzen.com',
url='https://github.com/transitland/mapzen-gtfs',
license='License :: OSI Approved :: MIT License',
packages=['mzgtfs'],
install_requires=['unicodecsv', 'mzgeohash', 'pytz'],
zip_safe=False,
# Include examples.
package_data = {
'': ['*.txt', '*.md', '*.zip']
}
)
|
Add pytz to required dependencies
|
Add pytz to required dependencies
|
Python
|
mit
|
brechtvdv/mapzen-gtfs,brennan-v-/mapzen-gtfs,transitland/mapzen-gtfs
|
---
+++
@@ -11,7 +11,7 @@
url='https://github.com/transitland/mapzen-gtfs',
license='License :: OSI Approved :: MIT License',
packages=['mzgtfs'],
- install_requires=['unicodecsv', 'mzgeohash'],
+ install_requires=['unicodecsv', 'mzgeohash', 'pytz'],
zip_safe=False,
# Include examples.
package_data = {
|
f4510b9b6402ddbe2412eb5524c7a44eb6bc966d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding: utf8
# Copyright 2014-2015 Vincent Jacques <vincent@vincent-jacques.net>
import contextlib
import os
import setuptools
import setuptools.command.test
version = "0.2.1"
setuptools.setup(
name="LowVoltage",
version=version,
description="Standalone DynamoDB client not hiding any feature",
author="Vincent Jacques",
author_email="vincent@vincent-jacques.net",
url="http://jacquev6.github.io/LowVoltage",
packages=sorted(dirpath.replace("/", ".") for dirpath, dirnames, filenames in os.walk("LowVoltage") if "__init__.py" in filenames),
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Environment :: Web Environment",
],
test_suite="LowVoltage.tests" if "AWS_ACCESS_KEY_ID" in os.environ else "LowVoltage.tests.local",
test_loader="testresources:TestLoader",
use_2to3=True,
)
|
#!/usr/bin/env python
# coding: utf8
# Copyright 2014-2015 Vincent Jacques <vincent@vincent-jacques.net>
import contextlib
import os
import setuptools
import setuptools.command.test
version = "0.2.3"
setuptools.setup(
name="LowVoltage",
version=version,
description="Standalone DynamoDB client not hiding any feature",
author="Vincent Jacques",
author_email="vincent@vincent-jacques.net",
url="http://jacquev6.github.io/LowVoltage",
packages=sorted(dirpath.replace("/", ".") for dirpath, dirnames, filenames in os.walk("LowVoltage") if "__init__.py" in filenames),
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Environment :: Web Environment",
],
test_suite="LowVoltage.tests" if "AWS_ACCESS_KEY_ID" in os.environ else "LowVoltage.tests.local",
test_loader="testresources:TestLoader",
use_2to3=True,
)
|
Fix version (0.2.2 never made it to PyPI)
|
Fix version (0.2.2 never made it to PyPI)
|
Python
|
mit
|
jacquev6/LowVoltage,jacquev6/LowVoltage
|
---
+++
@@ -9,7 +9,7 @@
import setuptools.command.test
-version = "0.2.1"
+version = "0.2.3"
setuptools.setup(
|
87b708002b80be80657c0eb1d7670fe40f1d992d
|
setup.py
|
setup.py
|
from setuptools import setup
REPO_URL = 'http://github.com/datasciencebr/serenata-toolbox'
setup(
author='Serenata de Amor',
author_email='op.serenatadeamor@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
description='Toolbox for Serenata de Amor project',
zip_safe=False,
install_requires=[
'aiofiles',
'aiohttp',
'boto3',
'beautifulsoup4>=4.4',
'lxml>=3.6',
'pandas>=0.18',
'tqdm'
],
keywords='serenata de amor, data science, brazil, corruption',
license='MIT',
long_description='Check `Serenata Toolbox at GitHub <{}>`_.'.format(REPO_URL),
name='serenata-toolbox',
packages=[
'serenata_toolbox.federal_senate',
'serenata_toolbox.chamber_of_deputies',
'serenata_toolbox.datasets'
],
url=REPO_URL,
version='12.3.0'
)
|
from setuptools import setup
REPO_URL = 'http://github.com/datasciencebr/serenata-toolbox'
setup(
author='Serenata de Amor',
author_email='op.serenatadeamor@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
description='Toolbox for Serenata de Amor project',
zip_safe=False,
install_requires=[
'aiofiles',
'aiohttp',
'boto3',
'beautifulsoup4>=4.4',
'lxml>=3.6',
'pandas>=0.18',
'tqdm'
],
keywords='serenata de amor, data science, brazil, corruption',
license='MIT',
long_description='Check `Serenata Toolbox at GitHub <{}>`_.'.format(REPO_URL),
name='serenata-toolbox',
packages=[
'serenata_toolbox.federal_senate',
'serenata_toolbox.chamber_of_deputies',
'serenata_toolbox.datasets'
],
url=REPO_URL,
version='12.3.1'
)
|
Change version from 12.3.0 to 12.3.1
|
Change version from 12.3.0 to 12.3.1
|
Python
|
mit
|
datasciencebr/serenata-toolbox
|
---
+++
@@ -34,5 +34,5 @@
'serenata_toolbox.datasets'
],
url=REPO_URL,
- version='12.3.0'
+ version='12.3.1'
)
|
63431113fbb1d1d6793761e3cc30a8492df2f580
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""How to release a new version: https://packaging.python.org/en/latest/distributing.html#uploading-your-project-to-pypi"""
from businesstime import __version__
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='businesstime',
version=__version__,
author='SeatGeek',
author_email='hi@seatgeek.com',
packages=['businesstime'],
url='http://github.com/seatgeek/businesstime',
license=open('LICENSE.txt').read(),
classifiers=[
'Programming Language :: Python :: 2.7',
],
description='A simple utility for calculating business time aware timedeltas between two datetimes',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGES.rst').read(),
tests_require=['nose'],
test_suite='nose.collector'
)
|
#!/usr/bin/env python
"""How to release a new version: https://packaging.python.org/en/latest/distributing.html#uploading-your-project-to-pypi"""
from businesstime import __version__
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='businesstime',
version=__version__,
author='SeatGeek',
author_email='hi@seatgeek.com',
packages=[
'businesstime',
'businesstime.holidays',
],
url='http://github.com/seatgeek/businesstime',
license=open('LICENSE.txt').read(),
classifiers=[
'Programming Language :: Python :: 2.7',
],
description='A simple utility for calculating business time aware timedeltas between two datetimes',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGES.rst').read(),
tests_require=['nose'],
test_suite='nose.collector'
)
|
Add holidays module to packages list
|
Add holidays module to packages list
Closes #14
|
Python
|
bsd-2-clause
|
seatgeek/businesstime
|
---
+++
@@ -14,7 +14,10 @@
version=__version__,
author='SeatGeek',
author_email='hi@seatgeek.com',
- packages=['businesstime'],
+ packages=[
+ 'businesstime',
+ 'businesstime.holidays',
+ ],
url='http://github.com/seatgeek/businesstime',
license=open('LICENSE.txt').read(),
classifiers=[
|
76dddab86552a1e65cdc8ed26bb0674c83d5697b
|
setup.py
|
setup.py
|
from distutils.core import setup
import pykka
setup(
name='Pykka',
version=pykka.get_version(),
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
packages=['pykka'],
url='http://jodal.github.com/pykka/',
license='Apache License, Version 2.0',
description='Pykka is a concurrency abstraction which let you use ' +
'concurrent actors like regular objects',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries',
],
)
|
from distutils.core import setup
import pykka
setup(
name='Pykka',
version=pykka.get_version(),
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
packages=['pykka'],
url='http://jodal.github.com/pykka/',
license='Apache License, Version 2.0',
description='Pykka is easy to use concurrency using the actor model',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries',
],
)
|
Update description and categories for PyPI
|
Update description and categories for PyPI
|
Python
|
apache-2.0
|
tempbottle/pykka,jodal/pykka,tamland/pykka
|
---
+++
@@ -10,8 +10,7 @@
packages=['pykka'],
url='http://jodal.github.com/pykka/',
license='Apache License, Version 2.0',
- description='Pykka is a concurrency abstraction which let you use ' +
- 'concurrent actors like regular objects',
+ description='Pykka is easy to use concurrency using the actor model',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 3 - Alpha',
@@ -19,6 +18,7 @@
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries',
],
)
|
e7d610ae806979eae0084c442bdba79c2bd919a2
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys
from setuptools import setup, find_packages
import pyecore
if sys.version_info < (3, 0):
sys.exit('Sorry, Python < 3.0 is not supported')
setup(
name="pyecore",
version=pyecore.__version__,
description=("A Pythonic Implementation of the Eclipse Modeling "
"Framework"),
long_description=open('README.rst').read(),
keywords="model metamodel EMF Ecore",
url="https://github.com/aranega/pyecore",
author="Vincent Aranega",
author_email="vincent.aranega@gmail.com",
packages=find_packages(exclude=['examples']),
package_data={'': ['LICENSE',
'README.rst']},
include_package_data=True,
install_requires=['ordered-set', 'lxml'],
extras_require={'testing': ['pytest']},
license='BSD 3-Clause',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Operating System :: OS Independent",
"Intended Audience :: Developers",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
"License :: OSI Approved :: BSD License",
]
)
|
#!/usr/bin/env python
import sys
from setuptools import setup, find_packages
import pyecore
if sys.version_info < (3, 0):
sys.exit('Sorry, Python < 3.0 is not supported')
setup(
name="pyecore",
version=pyecore.__version__,
description=("A Pythonic Implementation of the Eclipse Modeling "
"Framework"),
long_description=open('README.rst').read(),
keywords="model metamodel EMF Ecore",
url="https://github.com/aranega/pyecore",
author="Vincent Aranega",
author_email="vincent.aranega@gmail.com",
packages=find_packages(exclude=['examples']),
package_data={'': ['LICENSE',
'README.rst']},
include_package_data=True,
install_requires=['ordered-set', 'lxml'],
extras_require={'testing': ['pytest']},
license='BSD 3-Clause',
classifiers=[
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Operating System :: OS Independent",
"Intended Audience :: Developers",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
"License :: OSI Approved :: BSD License",
]
)
|
Change status from pre-alpha to beta (yeay \o/)
|
Change status from pre-alpha to beta (yeay \o/)
|
Python
|
bsd-3-clause
|
pyecore/pyecore,aranega/pyecore
|
---
+++
@@ -28,7 +28,7 @@
license='BSD 3-Clause',
classifiers=[
- "Development Status :: 2 - Pre-Alpha",
+ "Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Operating System :: OS Independent",
|
d403ae5e41f53f6c2893a946b94c4de899127fba
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='udiskie',
version='0.3.10',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
|
from distutils.core import setup
setup(
name='udiskie',
version='0.3.11',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
url='http://bitbucket.org/byronclark/udiskie',
license='MIT',
packages=[
'udiskie',
],
scripts=[
'bin/udiskie',
'bin/udiskie-umount',
],
)
|
Prepare for next development cycle
|
Prepare for next development cycle
|
Python
|
mit
|
pstray/udiskie,pstray/udiskie,coldfix/udiskie,coldfix/udiskie,mathstuf/udiskie,khardix/udiskie
|
---
+++
@@ -2,7 +2,7 @@
setup(
name='udiskie',
- version='0.3.10',
+ version='0.3.11',
description='Removable disk automounter for udisks',
author='Byron Clark',
author_email='byron@theclarkfamily.name',
|
68ca30a898306a86092f8477666ba20649ff6a08
|
setup.py
|
setup.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Senegal',
version='0.5.1',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Information Analysis",
],
description=u'Senegalese tax and benefit system for OpenFisca',
keywords='benefit microsimulation senegal social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
url='https://github.com/openfisca/senegal',
data_files=[
('share/openfisca/openfisca-senegal',
['CHANGELOG.md', 'LICENSE', 'README.md']),
],
extras_require={
'notebook': [
'notebook',
'matplotlib',
'pandas',
],
'survey': [
'OpenFisca-Survey-Manager >= 0.8.2',
]
},
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 20.0, < 21.0',
'PyYAML >= 3.10',
],
packages=find_packages(exclude=['openfisca_senegal.tests*']),
test_suite='nose.collector',
)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Senegal',
version='0.5.1',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Information Analysis",
],
description=u'Senegalese tax and benefit system for OpenFisca',
keywords='benefit microsimulation senegal social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
url='https://github.com/openfisca/senegal',
data_files=[
('share/openfisca/openfisca-senegal',
['CHANGELOG.md', 'LICENSE', 'README.md']),
],
extras_require={
'notebook': [
'notebook',
'matplotlib',
'pandas',
'xlrd',
],
'survey': [
'OpenFisca-Survey-Manager >= 0.8.2',
]
},
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 20.0, < 21.0',
'PyYAML >= 3.10',
],
packages=find_packages(exclude=['openfisca_senegal.tests*']),
test_suite='nose.collector',
)
|
Add xlrd dependency for notebook
|
Add xlrd dependency for notebook
|
Python
|
agpl-3.0
|
openfisca/senegal
|
---
+++
@@ -31,6 +31,7 @@
'notebook',
'matplotlib',
'pandas',
+ 'xlrd',
],
'survey': [
'OpenFisca-Survey-Manager >= 0.8.2',
|
10eb6b8b29906c5c89c3675b660c21028db5b2c3
|
setup.py
|
setup.py
|
import setuptools
setuptools.setup(
name="nbresuse",
version='0.2.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
],
package_data={'nbresuse': ['static/*']},
data_files=[
('etc/jupyter/jupyter_notebook_config.d', ['nbresuse/etc/serverextension.json']),
('etc/jupyter/nbconfig/notebook.d', ['nbresuse/etc/nbextension.json'])
],
zip_safe=False,
include_package_data=True
)
|
from glob import glob
import setuptools
setuptools.setup(
name="nbresuse",
version='0.2.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
],
data_files=[
('share/jupyter/nbextensions/nbresuse', glob('nbresuse/static/*')),
('etc/jupyter/jupyter_notebook_config.d', ['nbresuse/etc/serverextension.json']),
('etc/jupyter/nbconfig/notebook.d', ['nbresuse/etc/nbextension.json'])
],
zip_safe=False,
include_package_data=True
)
|
Put nbresuse js files in appropriate path
|
Put nbresuse js files in appropriate path
How did this work before?
|
Python
|
bsd-2-clause
|
yuvipanda/nbresuse,yuvipanda/nbresuse
|
---
+++
@@ -1,3 +1,4 @@
+from glob import glob
import setuptools
setuptools.setup(
@@ -11,8 +12,8 @@
'psutil',
'notebook',
],
- package_data={'nbresuse': ['static/*']},
data_files=[
+ ('share/jupyter/nbextensions/nbresuse', glob('nbresuse/static/*')),
('etc/jupyter/jupyter_notebook_config.d', ['nbresuse/etc/serverextension.json']),
('etc/jupyter/nbconfig/notebook.d', ['nbresuse/etc/nbextension.json'])
],
|
c42bd2e3ae6f8c5c4170c3d5f2ae8ce176939f97
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/blancltd/blanc-contentfiles',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='blanc-contentfiles',
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
url='https://github.com/developersociety/blanc-contentfiles',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
|
Update GitHub repos from blancltd to developersociety
|
Update GitHub repos from blancltd to developersociety
|
Python
|
bsd-3-clause
|
blancltd/blanc-contentfiles
|
---
+++
@@ -13,7 +13,7 @@
version='0.2.4',
description='Blanc Content Files',
long_description=readme,
- url='https://github.com/blancltd/blanc-contentfiles',
+ url='https://github.com/developersociety/blanc-contentfiles',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
|
7dcd285bb9e1b48c70d39eb35d132277e4d8ee88
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from __future__ import print_function
from setuptools import setup, find_packages
entry_points = """
[glue.plugins]
vispy_volume=glue_vispy_viewers.volume:setup
vispy_scatter=glue_vispy_viewers.scatter:setup
"""
# Add the following to the above entry points to enable the isosurface viewer
# vispy_isosurface=glue_vispy_viewers.isosurface:setup
try:
import pypandoc
LONG_DESCRIPTION = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
with open('README.md') as infile:
LONG_DESCRIPTION = infile.read()
setup(name='glue-vispy-viewers',
version="0.2.dev0",
description='Vispy-based viewers for Glue',
long_description=LONG_DESCRIPTION,
url="https://github.com/glue-viz/glue-3d-viewer",
author='Penny Qian, Maxwell Tsai, and Thomas Robitaille',
author_email='glueviz@gmail.com',
packages = find_packages(),
package_data={'glue_vispy_viewers.volume': ['*.ui'],
'glue_vispy_viewers.common': ['*.ui'],
'glue_vispy_viewers.isosurface': ['*.ui'],
'glue_vispy_viewers.scatter': ['*.ui']},
entry_points=entry_points
)
|
#!/usr/bin/env python
from __future__ import print_function
from setuptools import setup, find_packages
entry_points = """
[glue.plugins]
vispy_volume=glue_vispy_viewers.volume:setup
vispy_scatter=glue_vispy_viewers.scatter:setup
vispy_isosurface=glue_vispy_viewers.isosurface:setup
"""
# Add the following to the above entry points to enable the isosurface viewer
# vispy_isosurface=glue_vispy_viewers.isosurface:setup
try:
import pypandoc
LONG_DESCRIPTION = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
with open('README.md') as infile:
LONG_DESCRIPTION = infile.read()
setup(name='glue-vispy-viewers',
version="0.2.dev0",
description='Vispy-based viewers for Glue',
long_description=LONG_DESCRIPTION,
url="https://github.com/glue-viz/glue-3d-viewer",
author='Penny Qian, Maxwell Tsai, and Thomas Robitaille',
author_email='glueviz@gmail.com',
packages = find_packages(),
package_data={'glue_vispy_viewers.volume': ['*.ui'],
'glue_vispy_viewers.common': ['*.ui'],
'glue_vispy_viewers.isosurface': ['*.ui'],
'glue_vispy_viewers.scatter': ['*.ui']},
entry_points=entry_points
)
|
Enable Isosurface class by default
|
Enable Isosurface class by default
|
Python
|
bsd-2-clause
|
glue-viz/glue-3d-viewer,PennyQ/astro-vispy,PennyQ/glue-3d-viewer,glue-viz/glue-vispy-viewers,astrofrog/glue-vispy-viewers,astrofrog/glue-3d-viewer
|
---
+++
@@ -8,6 +8,7 @@
[glue.plugins]
vispy_volume=glue_vispy_viewers.volume:setup
vispy_scatter=glue_vispy_viewers.scatter:setup
+vispy_isosurface=glue_vispy_viewers.isosurface:setup
"""
# Add the following to the above entry points to enable the isosurface viewer
|
c1498aebcb7d74d023be65055f19a89acf0ec546
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
from distutils.file_util import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')
|
#!/usr/bin/env python
from distutils.core import setup
from distutils.file_util import copy_file
import platform
version = "0.1.0"
setup(name="riemann-sumd",
version=version,
description="Python agent for scheduling event generating processes and sending the results to Riemann",
author="Brian Hatfield",
author_email="bmhatfield@gmail.com",
url="https://github.com/bmhatfield/riemann-sumd",
package_dir={'': 'lib'},
py_modules=['event', 'loader', 'scheduler', 'sender', 'task'],
data_files=[('/etc/init/', ["init/ubuntu/sumd.conf"]),
('/etc/sumd', ['examples/etc/sumd/sumd.conf']),
('/etc/sumd/tasks.d', ['examples/etc/sumd/tasks.d/simple.task.example']),
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
|
Remove copy_file call because it doesn't work right with bdist_deb
|
Remove copy_file call because it doesn't work right with bdist_deb
|
Python
|
mit
|
crashlytics/riemann-sumd
|
---
+++
@@ -19,5 +19,3 @@
('/etc/sumd/tags.d', ['examples/etc/sumd/tags.d/simple.tag.example'])],
scripts=["bin/sumd"]
)
-
-copy_file('/lib/init/upstart-job', '/etc/init.d/sumd', link='sym')
|
4d3bf81a46033fd2a522705612746ac332a2aa73
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from pypandoc import convert
long_description = convert('README.md', 'rst')
except IOError:
print("warning: README.md not found")
long_description = ""
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
long_description = ""
setup(
name="pybossa-pbs",
version="1.4",
author="Daniel LombraΓ±a GonzΓ‘lez",
author_email="info@pybossa.com",
description="PyBossa command line client",
long_description=long_description,
license="AGPLv3",
url="https://github.com/PyBossa/pbs",
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',],
py_modules=['pbs', 'helpers'],
install_requires=['Click', 'pybossa-client', 'requests', 'nose', 'mock', 'coverage',
'rednose', 'pypandoc', 'simplejson', 'jsonschema'],
entry_points='''
[console_scripts]
pbs=pbs:cli
'''
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from pypandoc import convert
long_description = convert('README.md', 'rst')
except IOError:
print("warning: README.md not found")
long_description = ""
except ImportError:
print("warning: pypandoc module not found, could not convert Markdown to RST")
long_description = ""
setup(
name="pybossa-pbs",
version="1.4",
author="Daniel LombraΓ±a GonzΓ‘lez",
author_email="info@pybossa.com",
description="PyBossa command line client",
long_description=long_description,
license="AGPLv3",
url="https://github.com/PyBossa/pbs",
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',],
py_modules=['pbs', 'helpers'],
install_requires=['Click', 'pybossa-client', 'requests', 'nose', 'mock', 'coverage',
'rednose', 'pypandoc', 'simplejson', 'jsonschema', 'polib'],
entry_points='''
[console_scripts]
pbs=pbs:cli
'''
)
|
Add polib as a dependency.
|
Add polib as a dependency.
|
Python
|
agpl-3.0
|
PyBossa/pbs,PyBossa/pbs,PyBossa/pbs
|
---
+++
@@ -30,7 +30,7 @@
'Programming Language :: Python',],
py_modules=['pbs', 'helpers'],
install_requires=['Click', 'pybossa-client', 'requests', 'nose', 'mock', 'coverage',
- 'rednose', 'pypandoc', 'simplejson', 'jsonschema'],
+ 'rednose', 'pypandoc', 'simplejson', 'jsonschema', 'polib'],
entry_points='''
[console_scripts]
pbs=pbs:cli
|
fe870ecdfe63348607fa5d5e6c4101e2609c7553
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
"""
Adapted from:
https://packaging.python.org/tutorials/packaging-projects/
"""
import setuptools
from fsic import __version__
with open('README.md') as f:
long_description = f.read()
setuptools.setup(
name='fsic',
version=__version__,
author='Chris Thoung',
author_email='chris.thoung@gmail.com',
description='Tools for macroeconomic modelling in Python',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/ChrisThoung/fsic',
py_modules=['fsic', 'fsictools', 'fsic_fortran'],
python_requires='>=3.6',
install_requires=[
'numpy',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
],
platforms=['Any'],
)
|
# -*- coding: utf-8 -*-
"""
Adapted from:
https://packaging.python.org/tutorials/packaging-projects/
"""
import re
import setuptools
# Get version number without having to `import` the `fsic` module (and
# attempting to import NumPy before it gets installed). Idea from:
# https://packaging.python.org/guides/single-sourcing-package-version/
def get_version():
with open('fsic.py') as f:
for line in f:
if line.startswith('__version__'):
return re.split(r'''["']''', line)[1]
with open('README.md') as f:
long_description = f.read()
setuptools.setup(
name='fsic',
version=get_version(),
author='Chris Thoung',
author_email='chris.thoung@gmail.com',
description='Tools for macroeconomic modelling in Python',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/ChrisThoung/fsic',
py_modules=['fsic', 'fsictools', 'fsic_fortran'],
python_requires='>=3.6',
install_requires=[
'numpy',
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
],
platforms=['Any'],
)
|
Implement alternative way of getting version number
|
BLD: Implement alternative way of getting version number
|
Python
|
mit
|
ChrisThoung/fsic
|
---
+++
@@ -4,8 +4,18 @@
https://packaging.python.org/tutorials/packaging-projects/
"""
+import re
import setuptools
-from fsic import __version__
+
+
+# Get version number without having to `import` the `fsic` module (and
+# attempting to import NumPy before it gets installed). Idea from:
+# https://packaging.python.org/guides/single-sourcing-package-version/
+def get_version():
+ with open('fsic.py') as f:
+ for line in f:
+ if line.startswith('__version__'):
+ return re.split(r'''["']''', line)[1]
with open('README.md') as f:
@@ -13,7 +23,7 @@
setuptools.setup(
name='fsic',
- version=__version__,
+ version=get_version(),
author='Chris Thoung',
author_email='chris.thoung@gmail.com',
description='Tools for macroeconomic modelling in Python',
|
7ab6e87a8cf89c12733f7923c0f4564672f549fc
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='k2catalogue',
version='0.0.1',
author='Simon Walker',
install_requires=['requests', 'sqlalchemy', 'ipython',],
tests_require=['vcrpy', 'pytest'],
packages=find_packages(exclude=['venv']),
entry_points={
'console_scripts': [
'k2cat-search = k2catalogue.cli:main',
],
}
)
|
from setuptools import setup, find_packages
setup(
name='k2catalogue',
version='0.0.1',
author='Simon Walker',
install_requires=['requests', 'sqlalchemy', 'ipython', 'vcrpy'],
tests_require=['vcrpy', 'pytest'],
packages=find_packages(exclude=['venv']),
entry_points={
'console_scripts': [
'k2cat-search = k2catalogue.cli:main',
],
}
)
|
Add vcrpy back to main program requirements
|
Add vcrpy back to main program requirements
|
Python
|
mit
|
mindriot101/k2catalogue
|
---
+++
@@ -4,7 +4,7 @@
name='k2catalogue',
version='0.0.1',
author='Simon Walker',
- install_requires=['requests', 'sqlalchemy', 'ipython',],
+ install_requires=['requests', 'sqlalchemy', 'ipython', 'vcrpy'],
tests_require=['vcrpy', 'pytest'],
packages=find_packages(exclude=['venv']),
entry_points={
|
7c0c5631ff9f2d3511b7c460d22516b5b0393697
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.1'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
packages=['linersock'],
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
keywords=['nonblocking', 'socket', 'wrapper', 'library'])
|
#!/usr/bin/env python
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.2'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
keywords=['nonblocking', 'socket', 'wrapper', 'library'],
packages=['linersock'],
install_requires=[
'six',
],
)
|
Add six as a dependency.
|
Add six as a dependency.
|
Python
|
mit
|
kalekundert/linersock,kalekundert/linersock
|
---
+++
@@ -7,15 +7,19 @@
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
-version = '1.1'
+version = '1.2'
distutils.core.setup(
name='linersock',
version=version,
author='Kale Kundert and Alex Mitchell',
- packages=['linersock'],
url='https://github.com/kxgames/linersock',
download_url='https://github.com/kxgames/linersock/tarball/'+version,
license='LICENSE.txt',
description="A thin layer between you and your sockets that helps prevent chafing.",
long_description=open('README.rst').read(),
- keywords=['nonblocking', 'socket', 'wrapper', 'library'])
+ keywords=['nonblocking', 'socket', 'wrapper', 'library'],
+ packages=['linersock'],
+ install_requires=[
+ 'six',
+ ],
+)
|
149c5fb9651f8e33a5e2984f47f8de05c02c1e43
|
setup.py
|
setup.py
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-smsish',
version='1.1',
packages=[
'smsish',
'smsish.sms',
],
include_package_data=True,
license='MIT', # example license
description='A simple Django app to send SMS messages using an API similar to that of django.core.mail.',
long_description=README,
url='https://github.com/RyanBalfanz/django-smsish',
author='Ryan Balfanz',
author_email='ryan@ryanbalfanz.net',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Communications',
'Topic :: Communications :: Telephony',
],
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-smsish',
version='1.1',
packages=[
'smsish',
'smsish.sms',
],
include_package_data=True,
license='MIT', # example license
description='A simple Django app to send SMS messages using an API similar to that of django.core.mail.',
long_description=README,
url='https://github.com/RyanBalfanz/django-smsish',
author='Ryan Balfanz',
author_email='ryan@ryanbalfanz.net',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Communications',
'Topic :: Communications :: Telephony',
],
)
|
Add 'Programming Language :: Python :: 3.4' to classifiers
|
Add 'Programming Language :: Python :: 3.4' to classifiers
https://travis-ci.org/RyanBalfanz/django-smsish/builds/105968596
|
Python
|
mit
|
RyanBalfanz/django-smsish
|
---
+++
@@ -31,6 +31,7 @@
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Communications',
'Topic :: Communications :: Telephony',
|
30c80e5c793c1f59f2a22e10cf183de15ef463dd
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='pynubank',
version='1.2.0',
url='https://github.com/andreroggeri/pynubank',
author='AndrΓ© Roggeri Campos',
author_email='a.roggeri.c@gmail.com',
license='MIT',
packages=['pynubank'],
package_data={'pynubank': ['queries/*.gql']},
install_requires=['requests', 'qrcode'],
)
|
from setuptools import setup, find_packages
setup(
name='pynubank',
version='2.0.0b',
url='https://github.com/andreroggeri/pynubank',
author='AndrΓ© Roggeri Campos',
author_email='a.roggeri.c@gmail.com',
license='MIT',
packages=find_packages(),
package_data={'pynubank': ['queries/*.gql']},
install_requires=['requests', 'qrcode', 'pyOpenSSL', 'colorama', 'requests-pkcs12'],
entry_points={
'console_scripts': [
'pynubank = pynubank.cli:main'
]
}
)
|
Add cert generator script as entry point
|
feat: Add cert generator script as entry point
|
Python
|
mit
|
andreroggeri/pynubank
|
---
+++
@@ -1,14 +1,18 @@
-# -*- coding: utf-8 -*-
-from setuptools import setup
+from setuptools import setup, find_packages
setup(
name='pynubank',
- version='1.2.0',
+ version='2.0.0b',
url='https://github.com/andreroggeri/pynubank',
author='AndrΓ© Roggeri Campos',
author_email='a.roggeri.c@gmail.com',
license='MIT',
- packages=['pynubank'],
+ packages=find_packages(),
package_data={'pynubank': ['queries/*.gql']},
- install_requires=['requests', 'qrcode'],
+ install_requires=['requests', 'qrcode', 'pyOpenSSL', 'colorama', 'requests-pkcs12'],
+ entry_points={
+ 'console_scripts': [
+ 'pynubank = pynubank.cli:main'
+ ]
+ }
)
|
eb151779dc462e29001c75ed0e05b1c395a27968
|
setup.py
|
setup.py
|
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a11',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
extras_require={
'dev': [
'tangled[dev]>=1.0a11',
],
},
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
shell = tangled.web.scripts.shell
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
extras_require={
'dev': [
'tangled[dev]>=1.0a12',
],
},
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
shell = tangled.web.scripts.shell
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
Upgrade tangled 1.0a11 => 1.0a12
|
Upgrade tangled 1.0a11 => 1.0a12
|
Python
|
mit
|
TangledWeb/tangled.web
|
---
+++
@@ -13,13 +13,13 @@
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
- 'tangled>=1.0a11',
+ 'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
extras_require={
'dev': [
- 'tangled[dev]>=1.0a11',
+ 'tangled[dev]>=1.0a12',
],
},
entry_points="""
|
4a36283ac196f5c5832d32ba656aca7c651183f4
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='daffodil',
version='0.3.0',
author='James Robert',
description='A Super-simple DSL for filtering datasets',
license='MIT',
keywords='data filtering',
url='https://github.com/mediapredict/daffodil',
packages=['daffodil'],
install_requires=[
"parsimonious",
],
long_description='A Super-simple DSL for filtering datasets',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Utilities'
]
)
|
from setuptools import setup
setup(
name='daffodil',
version='0.3.1',
author='James Robert',
description='A Super-simple DSL for filtering datasets',
license='MIT',
keywords='data filtering',
url='https://github.com/mediapredict/daffodil',
packages=['daffodil'],
install_requires=[
"parsimonious",
],
long_description='A Super-simple DSL for filtering datasets',
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Utilities'
]
)
|
Increment version with bug fixes
|
Increment version with bug fixes
|
Python
|
mit
|
igorkramaric/daffodil,mediapredict/daffodil
|
---
+++
@@ -2,7 +2,7 @@
setup(
name='daffodil',
- version='0.3.0',
+ version='0.3.1',
author='James Robert',
description='A Super-simple DSL for filtering datasets',
license='MIT',
|
36f5fc790e27b9d617f71a3fe49160cb99d2e5d3
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
README_FILE = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(
name='tomako',
version='dev',
description='Tomako is the easiest way to use Mako as a template engine '
'for Tornado',
long_description=open(README_FILE).read(),
keywords=['tomako', 'mako', 'tornado'],
author='Rodrigo Machado',
author_email='rcmachado@gmail.com',
url='https://github.com/rcmachado/tomako',
license='MIT',
packages=find_packages(),
package_dir={'tomako': 'tomako'},
install_requires=[
"tornado>=2.3",
"Mako>=0.7.2",
],
include_package_data=True
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
README_FILE = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(
name='tomako',
version='0.1.0',
description='Tomako is the easiest way to use Mako as a template engine '
'for Tornado',
long_description=open(README_FILE).read(),
keywords=['tomako', 'mako', 'tornado'],
author='Rodrigo Machado',
author_email='rcmachado@gmail.com',
url='https://github.com/rcmachado/tomako',
license='MIT',
packages=find_packages(),
package_dir={'tomako': 'tomako'},
install_requires=[
"tornado>=2.3",
"Mako>=0.7.2",
],
include_package_data=True
)
|
Change version from dev to 0.1.0
|
Change version from dev to 0.1.0
|
Python
|
mit
|
rcmachado/tomako,rcmachado/tomako
|
---
+++
@@ -8,7 +8,7 @@
setup(
name='tomako',
- version='dev',
+ version='0.1.0',
description='Tomako is the easiest way to use Mako as a template engine '
'for Tornado',
long_description=open(README_FILE).read(),
|
ab5a93e55f9e6e1607afcb3ba03d2103dc423cc7
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
properties: Fancy properties for Python.
"""
from distutils.core import setup
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Natural Language :: English',
]
with open("README.rst") as f:
LONG_DESCRIPTION = ''.join(f.readlines())
setup(
name="properties",
version="0.2.2",
packages=find_packages(exclude=('tests',)),
install_requires=[
'future',
'numpy>=1.7',
'six',
'vectormath',
],
author="3point Science",
author_email="info@3ptscience.com",
description="properties",
long_description=LONG_DESCRIPTION,
keywords="property",
url="http://steno3d.com/",
download_url="http://github.com/3ptscience/properties",
classifiers=CLASSIFIERS,
platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
use_2to3=False,
)
|
#!/usr/bin/env python
"""
properties: Fancy properties for Python.
"""
from distutils.core import setup
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Natural Language :: English',
]
with open("README.rst") as f:
LONG_DESCRIPTION = ''.join(f.readlines())
setup(
name="properties",
version="0.2.2",
packages=find_packages(exclude=('tests',)),
install_requires=[
'future',
'numpy>=1.7',
'six',
'vectormath>=0.1.0',
],
author="3point Science",
author_email="info@3ptscience.com",
description="properties",
long_description=LONG_DESCRIPTION,
keywords="property",
url="http://steno3d.com/",
download_url="http://github.com/3ptscience/properties",
classifiers=CLASSIFIERS,
platforms=["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"],
use_2to3=False,
)
|
Update the required vectormath version to 0.1.0
|
Update the required vectormath version to 0.1.0
|
Python
|
mit
|
3ptscience/properties,aranzgeo/properties
|
---
+++
@@ -30,7 +30,7 @@
'future',
'numpy>=1.7',
'six',
- 'vectormath',
+ 'vectormath>=0.1.0',
],
author="3point Science",
author_email="info@3ptscience.com",
|
ad53b2e3f52382e9656a00b5c3641fa9d3e47bb1
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from distutils.core import setup
from hipshot import hipshot
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Video',
]
with open('README.rst', 'r') as file:
_long_description = file.read()
_setup_args = {
'author': hipshot.__author__,
'author_email': hipshot.__email__,
'classifiers': _classifiers,
'description': hipshot.__doc__,
'license': hipshot.__license__,
'long_description': _long_description,
'name': 'Hipshot',
'url': 'https://bitbucket.org/eliteraspberries/hipshot',
'version': hipshot.__version__,
}
if __name__ == '__main__':
setup(packages=['hipshot'], scripts=['scripts/hipshot'],
**_setup_args)
|
#!/usr/bin/env python2
from distutils.core import setup
from hipshot import hipshot
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Video',
]
with open('README.rst', 'r') as file:
_long_description = file.read()
_setup_args = {
'author': hipshot.__author__,
'author_email': hipshot.__email__,
'classifiers': _classifiers,
'description': hipshot.__doc__,
'license': hipshot.__license__,
'long_description': _long_description,
'name': 'Hipshot',
'url': 'https://bitbucket.org/eliteraspberries/hipshot',
'version': hipshot.__version__,
}
if __name__ == '__main__':
setup(packages=['avena', 'hipshot'], scripts=['scripts/hipshot'],
**_setup_args)
|
Install the Avena library too.
|
Install the Avena library too.
|
Python
|
isc
|
eliteraspberries/hipshot
|
---
+++
@@ -34,5 +34,5 @@
if __name__ == '__main__':
- setup(packages=['hipshot'], scripts=['scripts/hipshot'],
+ setup(packages=['avena', 'hipshot'], scripts=['scripts/hipshot'],
**_setup_args)
|
f74ccf59547d8b7dbee5cbcbf608940a33a8c3f9
|
setup.py
|
setup.py
|
from setuptools import setup
from flask_mwoauth import __version__
setup(name='flask-mwoauth',
version=__version__,
description='Flask blueprint to connect to a MediaWiki OAuth server',
url='http://github.com/valhallasw/flask-mwoauth',
author='Merlijn van Deen',
author_email='valhallasw@arctus.nl',
license='MIT',
packages=['flask_mwoauth'],
install_requires=['flask-oauth'],
zip_safe=True,
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
|
from setuptools import setup
exec([l for l in open("flask_mwoauth/__init__.py") if l.startswith('__version__')][0])
setup(name='flask-mwoauth',
version=__version__,
description='Flask blueprint to connect to a MediaWiki OAuth server',
url='http://github.com/valhallasw/flask-mwoauth',
author='Merlijn van Deen',
author_email='valhallasw@arctus.nl',
license='MIT',
packages=['flask_mwoauth'],
install_requires=['flask-oauth'],
zip_safe=True,
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
|
Use exec instead of import to determine package version
|
Use exec instead of import to determine package version
|
Python
|
mit
|
sitic/flask-mwoauth,valhallasw/flask-mwoauth,sitic/flask-mwoauth,valhallasw/flask-mwoauth
|
---
+++
@@ -1,6 +1,6 @@
from setuptools import setup
-from flask_mwoauth import __version__
+exec([l for l in open("flask_mwoauth/__init__.py") if l.startswith('__version__')][0])
setup(name='flask-mwoauth',
version=__version__,
|
571298802f889a1eb4c397ae9bfa4fea0a2a558c
|
prints_a_multiplication_table_of_primes_numbers/fibonacci_generator.py
|
prints_a_multiplication_table_of_primes_numbers/fibonacci_generator.py
|
# -*- coding: utf-8 -*-
__all__ = ["FibonacciGenerator"]
class FibonacciGenerator(object):
"""
"""
# We use List to cache already founded fibonacci numbers,
# and 1 is the first fibonacci number.
_fibonacci_list = [0, 1] # 0 is placeholder
skip_the_placeholder_idx = 1
def generate(self, n):
with_placehoder_len = n + 2
while len(self._fibonacci_list) < with_placehoder_len:
self._fibonacci_list.append(self.find_next_fibonacci())
return self._fibonacci_list[self.skip_the_placeholder_idx:n+1]
def find_next_fibonacci(self):
"""
find the next fibonacci after the last number of `self._fibonacci_list`
"""
assert len(self._fibonacci_list[-2:]) == 2, self._fibonacci_list[-2:]
last2_num, last1_num = self._fibonacci_list[-2:]
return last2_num + last1_num
|
# -*- coding: utf-8 -*-
__all__ = ["FibonacciGenerator"]
class FibonacciGenerator(object):
"""
"""
# We use List to cache already founded fibonacci numbers,
# and 1 is the first fibonacci number.
_fibonacci_list = [0, 1] # 0 is placeholder
skip_the_placeholder_idx = 1
def generate(self, n):
with_placehoder_len = n + 2
while len(self._fibonacci_list) < with_placehoder_len:
self._fibonacci_list.append(self.find_next_fibonacci())
return self._fibonacci_list[self.skip_the_placeholder_idx: n + 1]
def find_next_fibonacci(self):
"""
find the next fibonacci after the last number of `self._fibonacci_list`
"""
assert len(self._fibonacci_list[-2:]) == 2, self._fibonacci_list[-2:]
last2_num, last1_num = self._fibonacci_list[-2:]
return last2_num + last1_num
|
Fix E226 missing whitespace around arithmetic operator
|
Fix E226 missing whitespace around arithmetic operator
|
Python
|
mit
|
mvj3/prints_a_multiplication_table_of_primes_numbers
|
---
+++
@@ -17,7 +17,7 @@
while len(self._fibonacci_list) < with_placehoder_len:
self._fibonacci_list.append(self.find_next_fibonacci())
- return self._fibonacci_list[self.skip_the_placeholder_idx:n+1]
+ return self._fibonacci_list[self.skip_the_placeholder_idx: n + 1]
def find_next_fibonacci(self):
"""
|
54ba0d0f7ead8ce49f50d3ecfa6b0a86b227bea7
|
app.py
|
app.py
|
"""This is your typical app, demonstrating usage."""
import os
from flask_jsondash.charts_builder import charts
from flask import (
Flask,
session,
)
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=True,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_delete():
return False
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
auth=dict(
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
|
"""This is your typical app, demonstrating usage."""
import os
from flask_jsondash.charts_builder import charts
from flask import (
Flask,
session,
)
app = Flask(__name__)
app.config['SECRET_KEY'] = 'NOTSECURELOL'
app.config.update(
JSONDASH_FILTERUSERS=True,
JSONDASH_GLOBALDASH=True,
JSONDASH_GLOBAL_USER='global',
)
app.debug = True
app.register_blueprint(charts)
def _can_delete():
return True
def _can_clone():
return True
def _get_username():
return 'anonymous'
# Config examples.
app.config['JSONDASH'] = dict(
metadata=dict(
created_by=_get_username,
username=_get_username,
),
auth=dict(
clone=_can_clone,
delete=_can_delete,
)
)
@app.route('/', methods=['GET'])
def index():
"""Sample index."""
return '<a href="/charts">Visit the charts blueprint.</a>'
if __name__ == '__main__':
PORT = int(os.getenv('PORT', 5002))
app.run(debug=True, port=PORT)
|
Make example auth always true.
|
Make example auth always true.
|
Python
|
mit
|
christabor/flask_jsondash,christabor/flask_jsondash,christabor/flask_jsondash
|
---
+++
@@ -21,7 +21,7 @@
def _can_delete():
- return False
+ return True
def _can_clone():
|
e9bb349469b27ae9cd3280cf14aad27101b2f0fa
|
app.py
|
app.py
|
from flask import Flask
#Β from image_classification import ImageClassifier
app = Flask(__name__)
@app.route('/')
def home():
return 'Hello classification world!'
if __name__ == '__main__':
app.run()
|
from flask import Flask
#Β from image_classification import ImageClassifier
app = Flask(__name__)
@app.route('/')
def home():
return 'Hello classification world!'
if __name__ == '__main__':
app.run(debug=True, port=33507)
|
Add port as conf var
|
Add port as conf var
|
Python
|
mit
|
yassineAlouini/image-recognition-as-a-service,yassineAlouini/image-recognition-as-a-service
|
---
+++
@@ -11,4 +11,4 @@
if __name__ == '__main__':
- app.run()
+ app.run(debug=True, port=33507)
|
e2ea8d08d5e6006136b74d00a2ee64d1e7858941
|
plot_scores.py
|
plot_scores.py
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import argparse
import matplotlib.pyplot as plt
import pandas as pd
def main():
parser = argparse.ArgumentParser()
parser.add_argument('scores', type=str, help='specify path of scores.txt')
parser.add_argument('--title', type=str, default=None)
args = parser.parse_args()
scores = pd.read_csv(args.scores, delimiter='\t')
for col in ['mean', 'median']:
plt.plot(scores['steps'], scores[col], label=col)
if args.title is not None:
plt.title(args.title)
plt.xlabel('steps')
plt.ylabel('score')
plt.legend(loc='best')
fig_fname = args.scores + '.png'
plt.savefig(fig_fname)
print('Saved a figure as {}'.format(fig_fname))
if __name__ == '__main__':
main()
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import argparse
import os
import matplotlib.pyplot as plt
import pandas as pd
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--title', type=str, default='')
parser.add_argument('--file', action='append', dest='files',
default=[], type=str,
help='specify paths of scores.txt')
parser.add_argument('--label', action='append', dest='labels',
default=[], type=str,
help='specify labels for scores.txt files')
args = parser.parse_args()
assert len(args.files) > 0
assert len(args.labels) == len(args.files)
for fpath, label in zip(args.files, args.labels):
if os.path.isdir(fpath):
fpath = os.path.join(fpath, 'scores.txt')
assert os.path.exists(fpath)
scores = pd.read_csv(fpath, delimiter='\t')
plt.plot(scores['steps'], scores['mean'], label=label)
plt.xlabel('steps')
plt.ylabel('score')
plt.legend(loc='best')
if args.title:
plt.title(args.title)
fig_fname = args.files[0] + args.title + '.png'
plt.savefig(fig_fname)
print('Saved a figure as {}'.format(fig_fname))
if __name__ == '__main__':
main()
|
Support plotting multiple score files
|
Support plotting multiple score files
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
---
+++
@@ -5,6 +5,7 @@
from future import standard_library
standard_library.install_aliases()
import argparse
+import os
import matplotlib.pyplot as plt
import pandas as pd
@@ -12,19 +13,32 @@
def main():
parser = argparse.ArgumentParser()
- parser.add_argument('scores', type=str, help='specify path of scores.txt')
- parser.add_argument('--title', type=str, default=None)
+ parser.add_argument('--title', type=str, default='')
+ parser.add_argument('--file', action='append', dest='files',
+ default=[], type=str,
+ help='specify paths of scores.txt')
+ parser.add_argument('--label', action='append', dest='labels',
+ default=[], type=str,
+ help='specify labels for scores.txt files')
args = parser.parse_args()
- scores = pd.read_csv(args.scores, delimiter='\t')
- for col in ['mean', 'median']:
- plt.plot(scores['steps'], scores[col], label=col)
- if args.title is not None:
- plt.title(args.title)
+ assert len(args.files) > 0
+ assert len(args.labels) == len(args.files)
+
+ for fpath, label in zip(args.files, args.labels):
+ if os.path.isdir(fpath):
+ fpath = os.path.join(fpath, 'scores.txt')
+ assert os.path.exists(fpath)
+ scores = pd.read_csv(fpath, delimiter='\t')
+ plt.plot(scores['steps'], scores['mean'], label=label)
+
plt.xlabel('steps')
plt.ylabel('score')
plt.legend(loc='best')
- fig_fname = args.scores + '.png'
+ if args.title:
+ plt.title(args.title)
+
+ fig_fname = args.files[0] + args.title + '.png'
plt.savefig(fig_fname)
print('Saved a figure as {}'.format(fig_fname))
|
1e1c9715be91c2b723a26e037d2fa68064bd3bf6
|
run.py
|
run.py
|
import serial
import threading
print('Starting server...')
temperature_usb = '/dev/ttyAMA0'
BAUD_RATE = 9600
temperature_ser = serial.Serial(temperature_usb, BAUD_RATE)
def process_line(line):
print('Need to process line: {}'.format(line))
def temperature_loop():
line = ""
while True:
data = temperature_ser.read()
if(data == "\r"):
process_line(line)
line = ""
else:
line = line + data
temperature_thread = threading.Thread(target=temperature_loop)
temperature_thread.start()
|
import serial
import threading
print('Starting server...')
temperature_usb = '/dev/ttyAMA0'
BAUD_RATE = 38400
temperature_ser = serial.Serial(temperature_usb, BAUD_RATE)
def process_line(line):
print('Need to process line: {}'.format(line))
def temperature_loop():
line = ""
while True:
data = temperature_ser.read()
if(data == "\r"):
process_line(line)
line = ""
else:
line = line + data
temperature_thread = threading.Thread(target=temperature_loop)
temperature_thread.start()
|
Fix baud rate for temperature sensor
|
Fix baud rate for temperature sensor
|
Python
|
mit
|
illumenati/duwamish-sensor,tipsqueal/duwamish-sensor
|
---
+++
@@ -4,7 +4,7 @@
print('Starting server...')
temperature_usb = '/dev/ttyAMA0'
-BAUD_RATE = 9600
+BAUD_RATE = 38400
temperature_ser = serial.Serial(temperature_usb, BAUD_RATE)
def process_line(line):
|
9a30728493258d7dcf60b67a8c87489e1457df1a
|
kitchen/dashboard/templatetags/filters.py
|
kitchen/dashboard/templatetags/filters.py
|
"""Dashboard template filters"""
from django import template
import littlechef
from kitchen.settings import REPO
register = template.Library()
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
prev_role_list = littlechef.lib.get_roles_in_node({'run_list': run_list})
role_list = []
for role in prev_role_list:
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
# Only add if it doesn't start with excluded role prefixes
role_list.append(role)
return role_list
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
return littlechef.lib.get_recipes_in_node({'run_list': run_list})
|
"""Dashboard template filters"""
from django import template
import littlechef
from kitchen.settings import REPO
register = template.Library()
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
if run_list:
all_roles = littlechef.lib.get_roles_in_node(
{'run_list': run_list})
role_list = []
for role in all_roles:
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
# Only add if it doesn't start with excluded role prefixes
role_list.append(role)
return role_list
else:
return []
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
if run_list:
return littlechef.lib.get_recipes_in_node({'run_list': run_list})
else:
return []
|
Check 'role_list' before sending it to little_chef
|
Check 'role_list' before sending it to little_chef
|
Python
|
apache-2.0
|
edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen
|
---
+++
@@ -10,16 +10,23 @@
@register.filter(name='get_role_list')
def get_role_list(run_list):
"""Returns the role sublist from the given run_list"""
- prev_role_list = littlechef.lib.get_roles_in_node({'run_list': run_list})
- role_list = []
- for role in prev_role_list:
- if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
- # Only add if it doesn't start with excluded role prefixes
- role_list.append(role)
- return role_list
+ if run_list:
+ all_roles = littlechef.lib.get_roles_in_node(
+ {'run_list': run_list})
+ role_list = []
+ for role in all_roles:
+ if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX']):
+ # Only add if it doesn't start with excluded role prefixes
+ role_list.append(role)
+ return role_list
+ else:
+ return []
@register.filter(name='get_recipe_list')
def get_recipe_list(run_list):
"""Returns the recipe sublist from the given run_list"""
- return littlechef.lib.get_recipes_in_node({'run_list': run_list})
+ if run_list:
+ return littlechef.lib.get_recipes_in_node({'run_list': run_list})
+ else:
+ return []
|
0e9b8c2dbe5d3fbedf1b444819ca3820a9a13135
|
utils.py
|
utils.py
|
# -*- coding: utf-8 -*-
import re
_strip_re = re.compile(ur'[\'"`βββββ²β³β΄]+')
_punctuation_re = re.compile(ur'[\t !#$%&()*\-/<=>?@\[\\\]^_{|}:;,.β¦ββββ]+')
def makename(text, delim=u'-', maxlength=50, filter=None):
u"""
Generate a Unicode name slug.
>>> makename('This is a title')
u'this-is-a-title'
>>> makename('Invalid URL/slug here')
u'invalid-url-slug-here'
>>> makename('this.that')
u'this-that'
>>> makename("How 'bout this?")
u'how-bout-this'
>>> makename(u"Howβs that?")
u'hows-that'
>>> makename(u'K & D')
u'k-d'
"""
return unicode(delim.join([_strip_re.sub('', x) for x in _punctuation_re.split(text.lower()) if x != '']))
|
# -*- coding: utf-8 -*-
import re
_strip_re = re.compile(ur'[\'"`βββββ²β³β΄]+')
_punctuation_re = re.compile(ur'[\t +!#$%&()*\-/<=>?@\[\\\]^_{|}:;,.β¦ββββ]+')
def makename(text, delim=u'-', maxlength=50, filter=None):
u"""
Generate a Unicode name slug.
>>> makename('This is a title')
u'this-is-a-title'
>>> makename('Invalid URL/slug here')
u'invalid-url-slug-here'
>>> makename('this.that')
u'this-that'
>>> makename("How 'bout this?")
u'how-bout-this'
>>> makename(u"Howβs that?")
u'hows-that'
>>> makename(u'K & D')
u'k-d'
>>> makename('billion+ pageviews')
u'billion-pageviews'
"""
return unicode(delim.join([_strip_re.sub('', x) for x in _punctuation_re.split(text.lower()) if x != '']))
|
Remove + character from generated URLs
|
Remove + character from generated URLs
|
Python
|
agpl-3.0
|
hasgeek/funnel,piyushroshan/fossmeet,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,piyushroshan/fossmeet,jace/failconfunnel,jace/failconfunnel,piyushroshan/fossmeet,hasgeek/funnel,jace/failconfunnel
|
---
+++
@@ -3,7 +3,7 @@
import re
_strip_re = re.compile(ur'[\'"`βββββ²β³β΄]+')
-_punctuation_re = re.compile(ur'[\t !#$%&()*\-/<=>?@\[\\\]^_{|}:;,.β¦ββββ]+')
+_punctuation_re = re.compile(ur'[\t +!#$%&()*\-/<=>?@\[\\\]^_{|}:;,.β¦ββββ]+')
def makename(text, delim=u'-', maxlength=50, filter=None):
u"""
@@ -21,5 +21,7 @@
u'hows-that'
>>> makename(u'K & D')
u'k-d'
+ >>> makename('billion+ pageviews')
+ u'billion-pageviews'
"""
return unicode(delim.join([_strip_re.sub('', x) for x in _punctuation_re.split(text.lower()) if x != '']))
|
9448374db62049b6f0209a0b6c3b01f3336e2b2b
|
talks/core/renderers.py
|
talks/core/renderers.py
|
from datetime import datetime
from rest_framework import renderers
from icalendar import Calendar, Event
class ICalRenderer(renderers.BaseRenderer):
media_type = 'text/calendar'
format = 'ics'
def render(self, data, media_type=None, renderer_context=None):
cal = Calendar()
cal.add('prodid', 'talks.ox.ac.uk')
cal.add('version', '2.0')
for e in data:
cal.add_component(self._event_to_ics(e))
return cal.to_ical()
@staticmethod
def _event_to_ics(e):
event = Event()
if 'title' in e:
event.add('summary', e['title'])
if 'description' in e:
event.add('description', e['description'])
if 'start' in e:
# 2015-01-29T18:00:00Z
event.add('dtstart', datetime.strptime(e['start'], "%Y-%m-%dT%H:%M:%SZ"))
if 'url' in e:
event.add('url', e['url'])
event.add('uid', e['url'])
# TODO add location field
return event
|
from datetime import datetime
from rest_framework import renderers
from icalendar import Calendar, Event
class ICalRenderer(renderers.BaseRenderer):
media_type = 'text/calendar'
format = 'ics'
def render(self, data, media_type=None, renderer_context=None):
cal = Calendar()
cal.add('prodid', 'talks.ox.ac.uk')
cal.add('version', '2.0')
for e in data:
cal.add_component(self._event_to_ics(e))
return cal.to_ical()
@staticmethod
def _event_to_ics(e):
event = Event()
if 'title' in e:
event.add('summary', e['title'])
if 'description' in e:
event.add('description', e['description'])
if 'start' in e:
event.add('dtstart', dt_string_to_object(e['start']))
if 'url' in e:
event.add('url', e['url'])
event.add('uid', e['url'])
# TODO add location field
return event
def dt_string_to_object(string):
"""Transforms a string date into a datetime object
:param string: string representing a date/time
:return: python datetime object
"""
return datetime.strptime(string, "%Y-%m-%dT%H:%M:%SZ")
|
Refactor method string to date
|
Refactor method string to date
|
Python
|
apache-2.0
|
ox-it/talks.ox,ox-it/talks.ox,ox-it/talks.ox
|
---
+++
@@ -25,10 +25,17 @@
if 'description' in e:
event.add('description', e['description'])
if 'start' in e:
- # 2015-01-29T18:00:00Z
- event.add('dtstart', datetime.strptime(e['start'], "%Y-%m-%dT%H:%M:%SZ"))
+ event.add('dtstart', dt_string_to_object(e['start']))
if 'url' in e:
event.add('url', e['url'])
event.add('uid', e['url'])
# TODO add location field
return event
+
+
+def dt_string_to_object(string):
+ """Transforms a string date into a datetime object
+ :param string: string representing a date/time
+ :return: python datetime object
+ """
+ return datetime.strptime(string, "%Y-%m-%dT%H:%M:%SZ")
|
f16800869c6eaa00cb633f181749e4938c257dc6
|
robots/migrations/__init__.py
|
robots/migrations/__init__.py
|
"""
Django migrations for robots app
This package does not contain South migrations. South migrations can be found
in the ``south_migrations`` package.
"""
# This check is based on code from django-email-log. Thanks Trey Hunner.
# https://github.com/treyhunner/django-email-log/blob/master/email_log/migrations/__init__.py
SOUTH_ERROR_MESSAGE = """\n
For South support, customize the SOUTH_MIGRATION_MODULES setting like so:
SOUTH_MIGRATION_MODULES = {
'robots': 'robots.south_migrations',
}
"""
# Ensure the user is not using Django 1.6 or below with South
try:
from django.db import migrations # noqa
except ImportError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE)
|
Raise ImproperlyConfigured if django migrations module does not exist (for South users).
|
Raise ImproperlyConfigured if django migrations module does not exist (for South users).
|
Python
|
bsd-3-clause
|
jazzband/django-robots,jezdez/django-robots,jscott1971/django-robots,jscott1971/django-robots,jazzband/django-robots,jezdez/django-robots
|
---
+++
@@ -0,0 +1,24 @@
+"""
+Django migrations for robots app
+
+This package does not contain South migrations. South migrations can be found
+in the ``south_migrations`` package.
+"""
+
+# This check is based on code from django-email-log. Thanks Trey Hunner.
+# https://github.com/treyhunner/django-email-log/blob/master/email_log/migrations/__init__.py
+
+SOUTH_ERROR_MESSAGE = """\n
+For South support, customize the SOUTH_MIGRATION_MODULES setting like so:
+
+ SOUTH_MIGRATION_MODULES = {
+ 'robots': 'robots.south_migrations',
+ }
+"""
+
+# Ensure the user is not using Django 1.6 or below with South
+try:
+ from django.db import migrations # noqa
+except ImportError:
+ from django.core.exceptions import ImproperlyConfigured
+ raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE)
|
|
5448e38d14589b7558513f51d0abf541790be817
|
i3pystatus/core/command.py
|
i3pystatus/core/command.py
|
# from subprocess import CalledProcessError
import subprocess
def run_through_shell(command, enable_shell=False):
"""
Retrieves output of command
Returns tuple success (boolean)/ stdout(string) / stderr (string)
Don't use this function with programs that outputs lots of data since the output is saved
in one variable
"""
returncode = None
try:
proc = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=enable_shell)
out, stderr = proc.communicate()
out = out.decode("UTF-8")
stderr = stderr.decode("UTF-8")
returncode = proc.returncode
except OSError as e:
out = e.strerror
stderr = e.strerror
except subprocess.CalledProcessError as e:
out = e.output
return returncode, out, stderr
|
# from subprocess import CalledProcessError
from collections import namedtuple
import subprocess
CommandResult = namedtuple("Result", ['rc', 'out', 'err'])
def run_through_shell(command, enable_shell=False):
"""
Retrieves output of command
Returns tuple success (boolean)/ stdout(string) / stderr (string)
Don't use this function with programs that outputs lots of data since the output is saved
in one variable
"""
returncode = None
stderr = None
try:
proc = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=enable_shell)
out, stderr = proc.communicate()
out = out.decode("UTF-8")
stderr = stderr.decode("UTF-8")
returncode = proc.returncode
except OSError as e:
out = e.strerror
stderr = e.strerror
except subprocess.CalledProcessError as e:
out = e.output
return CommandResult(returncode, out, stderr)
|
Use named tuple for return value
|
Use named tuple for return value
|
Python
|
mit
|
m45t3r/i3pystatus,teto/i3pystatus,yang-ling/i3pystatus,opatut/i3pystatus,m45t3r/i3pystatus,juliushaertl/i3pystatus,Elder-of-Ozone/i3pystatus,MaicoTimmerman/i3pystatus,paulollivier/i3pystatus,onkelpit/i3pystatus,richese/i3pystatus,asmikhailov/i3pystatus,enkore/i3pystatus,juliushaertl/i3pystatus,eBrnd/i3pystatus,claria/i3pystatus,plumps/i3pystatus,eBrnd/i3pystatus,ismaelpuerto/i3pystatus,opatut/i3pystatus,richese/i3pystatus,drwahl/i3pystatus,Elder-of-Ozone/i3pystatus,ncoop/i3pystatus,fmarchenko/i3pystatus,paulollivier/i3pystatus,ismaelpuerto/i3pystatus,plumps/i3pystatus,schroeji/i3pystatus,yang-ling/i3pystatus,teto/i3pystatus,Arvedui/i3pystatus,ncoop/i3pystatus,drwahl/i3pystatus,schroeji/i3pystatus,onkelpit/i3pystatus,claria/i3pystatus,MaicoTimmerman/i3pystatus,asmikhailov/i3pystatus,facetoe/i3pystatus,enkore/i3pystatus,fmarchenko/i3pystatus,Arvedui/i3pystatus,facetoe/i3pystatus
|
---
+++
@@ -1,5 +1,8 @@
# from subprocess import CalledProcessError
+from collections import namedtuple
import subprocess
+
+CommandResult = namedtuple("Result", ['rc', 'out', 'err'])
def run_through_shell(command, enable_shell=False):
@@ -10,10 +13,11 @@
Don't use this function with programs that outputs lots of data since the output is saved
in one variable
"""
+
returncode = None
+ stderr = None
try:
proc = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=enable_shell)
-
out, stderr = proc.communicate()
out = out.decode("UTF-8")
stderr = stderr.decode("UTF-8")
@@ -26,4 +30,4 @@
except subprocess.CalledProcessError as e:
out = e.output
- return returncode, out, stderr
+ return CommandResult(returncode, out, stderr)
|
4eecac0764e8abfc33c9e77b8eb6b700b536f1a0
|
pull_me.py
|
pull_me.py
|
#!/usr/bin/env python
from random import randint
from time import sleep
from os import system
from easygui import msgbox
while True:
delay = randint(60, 2000)
sleep(delay)
system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav")
msgbox("Hi Dan", "Time is up")
|
#!/usr/bin/env python
from random import randint
from time import sleep
from os import system
import os.path
from easygui import msgbox
while True:
delay = randint(60, 2000)
sleep(delay)
if os.path.isfile("/usr/share/sounds/GNUstep/Glass.wav"):
system("aplay /usr/share/sounds/GNUstep/Glass.wav")
else:
system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav")
msgbox("Hi Dan", "Time is up")
|
Use Glass.wav if it exists.
|
Use Glass.wav if it exists.
|
Python
|
apache-2.0
|
dnuffer/carrot_slots
|
---
+++
@@ -2,10 +2,14 @@
from random import randint
from time import sleep
from os import system
+import os.path
from easygui import msgbox
while True:
delay = randint(60, 2000)
sleep(delay)
- system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav")
+ if os.path.isfile("/usr/share/sounds/GNUstep/Glass.wav"):
+ system("aplay /usr/share/sounds/GNUstep/Glass.wav")
+ else:
+ system("aplay /usr/lib/libreoffice/share/gallery/sounds/kongas.wav")
msgbox("Hi Dan", "Time is up")
|
9310e94a1406102bba109416f781f9d6330d0028
|
tests/test_itunes.py
|
tests/test_itunes.py
|
"""
test_itunes.py
Copyright Β© 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
|
"""
test_itunes.py
Copyright Β© 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file tests the functionality provided by the itunes module.
"""
import unittest
from datetime import datetime
from itunes.itunes import parse_value, run_applescript
from itunes.exceptions import AppleScriptError
class ITunesTests(unittest.TestCase):
"""
Test cases for iTunes functionality.
"""
def test_parse_value(self):
self.assertEquals(parse_value("10"), 10)
self.assertEquals(parse_value("1.0"), 1.0)
self.assertTrue(parse_value("true"))
self.assertFalse(parse_value("false"))
self.assertIsNone(parse_value(""))
self.assertIsNone(parse_value('""'))
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
def test_run_applescript(self):
self.assertRaises(AppleScriptError, run_applescript, "THIS IS INVALID" \
" APPLESCRIPT")
|
Add test to make sure `run_applescript` throws on bad script
|
Add test to make sure `run_applescript` throws on bad script
|
Python
|
mit
|
adanoff/iTunesTUI
|
---
+++
@@ -10,7 +10,8 @@
import unittest
from datetime import datetime
-from itunes.itunes import parse_value
+from itunes.itunes import parse_value, run_applescript
+from itunes.exceptions import AppleScriptError
class ITunesTests(unittest.TestCase):
"""
@@ -27,3 +28,7 @@
self.assertIsNone(parse_value("missing value"))
self.assertEquals(parse_value('date: "Saturday, March 13, 2010 at ' \
'5:02:22 PM"'), datetime.fromtimestamp(1268517742))
+
+ def test_run_applescript(self):
+ self.assertRaises(AppleScriptError, run_applescript, "THIS IS INVALID" \
+ " APPLESCRIPT")
|
44d5974fafdddb09a684882fc79662ae4c509f57
|
names/__init__.py
|
names/__init__.py
|
from os.path import abspath, join, dirname
import random
__title__ = 'names'
__version__ = '0.2'
__author__ = 'Trey Hunner'
__license__ = 'MIT'
full_path = lambda filename: abspath(join(dirname(__file__), filename))
FILES = {
'first:male': full_path('dist.male.first'),
'first:female': full_path('dist.female.first'),
'last': full_path('dist.all.last'),
}
def get_name(filename):
selected = random.random() * 90
with open(filename) as name_file:
for line in name_file:
name, _, cummulative, _ = line.split()
if float(cummulative) > selected:
return name
def get_first_name(gender=None):
if gender not in ('male', 'female'):
gender = random.choice(('male', 'female'))
return get_name(FILES['first:%s' % gender]).capitalize()
def get_last_name():
return get_name(FILES['last']).capitalize()
def get_full_name(gender=None):
return u"%s %s" % (get_first_name(gender), get_last_name())
|
from os.path import abspath, join, dirname
import random
__title__ = 'names'
__version__ = '0.2'
__author__ = 'Trey Hunner'
__license__ = 'MIT'
full_path = lambda filename: abspath(join(dirname(__file__), filename))
FILES = {
'first:male': full_path('dist.male.first'),
'first:female': full_path('dist.female.first'),
'last': full_path('dist.all.last'),
}
def get_name(filename):
selected = random.random() * 90
with open(filename) as name_file:
for line in name_file:
name, _, cummulative, _ = line.split()
if float(cummulative) > selected:
return name
def get_first_name(gender=None):
if gender not in ('male', 'female'):
gender = random.choice(('male', 'female'))
return get_name(FILES['first:%s' % gender]).capitalize()
def get_last_name():
return get_name(FILES['last']).capitalize()
def get_full_name(gender=None):
return unicode("%s %s").format(get_first_name(gender), get_last_name())
|
Fix unicode string syntax for Python 3
|
Fix unicode string syntax for Python 3
|
Python
|
mit
|
treyhunner/names,treyhunner/names
|
---
+++
@@ -38,4 +38,4 @@
def get_full_name(gender=None):
- return u"%s %s" % (get_first_name(gender), get_last_name())
+ return unicode("%s %s").format(get_first_name(gender), get_last_name())
|
b26f768067d0ac495d0cbecdf68c38e73e42662b
|
textblob_fr/__init__.py
|
textblob_fr/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from textblob_fr.taggers import PatternTagger
from textblob_fr.sentiments import PatternAnalyzer
__version__ = '0.2.0-dev'
__author__ = 'Steven Loria'
__license__ = "MIT"
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from textblob_fr.taggers import PatternTagger
from textblob_fr.sentiments import PatternAnalyzer
__version__ = '0.2.0'
__author__ = 'Steven Loria'
__license__ = "MIT"
|
Bump version 0.1.0 -> 0.2.0
|
Bump version 0.1.0 -> 0.2.0
|
Python
|
mit
|
sloria/textblob-fr
|
---
+++
@@ -3,6 +3,6 @@
from textblob_fr.taggers import PatternTagger
from textblob_fr.sentiments import PatternAnalyzer
-__version__ = '0.2.0-dev'
+__version__ = '0.2.0'
__author__ = 'Steven Loria'
__license__ = "MIT"
|
07058595e43290524d28b53b5919fb76f16c618b
|
test/test_validators.py
|
test/test_validators.py
|
from unittest import TestCase
from win_unc import validators as V
class TestIsValidDriveLetter(TestCase):
def test_valid(self):
self.assertTrue(V.is_valid_drive_letter('A'))
self.assertTrue(V.is_valid_drive_letter('Z'))
self.assertTrue(V.is_valid_drive_letter('a'))
self.assertTrue(V.is_valid_drive_letter('z'))
def test_invalid(self):
self.assertFalse(V.is_valid_drive_letter(''))
self.assertFalse(V.is_valid_drive_letter(':'))
self.assertFalse(V.is_valid_drive_letter('aa'))
self.assertFalse(V.is_valid_drive_letter('a:'))
|
from unittest import TestCase
from win_unc import validators as V
class TestIsValidDriveLetter(TestCase):
def test_valid(self):
self.assertTrue(V.is_valid_drive_letter('A'))
self.assertTrue(V.is_valid_drive_letter('Z'))
self.assertTrue(V.is_valid_drive_letter('a'))
self.assertTrue(V.is_valid_drive_letter('z'))
def test_invalid(self):
self.assertFalse(V.is_valid_drive_letter(''))
self.assertFalse(V.is_valid_drive_letter(':'))
self.assertFalse(V.is_valid_drive_letter('aa'))
self.assertFalse(V.is_valid_drive_letter('a:'))
class TestIsValidUncPath(TestCase):
def test_valid(self):
self.assertTrue(V.is_valid_unc_path(r'\\a'))
self.assertTrue(V.is_valid_unc_path(r'\\a\b\c'))
self.assertTrue(V.is_valid_unc_path(r'\\ABC\\'))
def test_invalid(self):
self.assertFalse(V.is_valid_unc_path(''))
self.assertFalse(V.is_valid_unc_path(r'\\'))
self.assertFalse(V.is_valid_unc_path(r'\\\a'))
self.assertFalse(V.is_valid_unc_path(r'C:\path'))
self.assertFalse(V.is_valid_unc_path(r'\\<a>'))
|
Add tests for UNC path validator
|
Add tests for UNC path validator
|
Python
|
mit
|
CovenantEyes/py_win_unc,nithinphilips/py_win_unc
|
---
+++
@@ -15,3 +15,17 @@
self.assertFalse(V.is_valid_drive_letter(':'))
self.assertFalse(V.is_valid_drive_letter('aa'))
self.assertFalse(V.is_valid_drive_letter('a:'))
+
+
+class TestIsValidUncPath(TestCase):
+ def test_valid(self):
+ self.assertTrue(V.is_valid_unc_path(r'\\a'))
+ self.assertTrue(V.is_valid_unc_path(r'\\a\b\c'))
+ self.assertTrue(V.is_valid_unc_path(r'\\ABC\\'))
+
+ def test_invalid(self):
+ self.assertFalse(V.is_valid_unc_path(''))
+ self.assertFalse(V.is_valid_unc_path(r'\\'))
+ self.assertFalse(V.is_valid_unc_path(r'\\\a'))
+ self.assertFalse(V.is_valid_unc_path(r'C:\path'))
+ self.assertFalse(V.is_valid_unc_path(r'\\<a>'))
|
6bde135b964690d2c51fe944e52f2a9c9c9dadab
|
opps/db/_redis.py
|
opps/db/_redis.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.db.conf import settings
from redis import ConnectionPool
from redis import Redis as RedisClient
class Redis:
def __init__(self, key_prefix, key_sufix):
self.key_prefix = key_prefix
self.key_sufix = key_sufix
self.host = settings.OPPS_DB_HOST
self.port = settings.OPPS_DB_PORT
self.db = 0
pool = ConnectionPool(host=self.host,
port=self.port,
db=self.db)
self.conn = RedisClient(connection_pool=pool)
def close(self):
self.conn = None
return True
def key(self):
return '{}_{}_{}'.format(settings.OPPS_DB_NAME,
self.key_prefix,
self.key_sufix)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.db.conf import settings
from redis import ConnectionPool
from redis import Redis as RedisClient
class Redis:
def __init__(self, key_prefix, key_sufix):
self.key_prefix = key_prefix
self.key_sufix = key_sufix
self.host = settings.OPPS_DB_HOST
self.port = settings.OPPS_DB_PORT
self.db = 0
pool = ConnectionPool(host=self.host,
port=self.port,
db=self.db)
self.conn = RedisClient(connection_pool=pool)
def close(self):
self.conn = None
return True
def key(self):
return '{}_{}_{}'.format(settings.OPPS_DB_NAME,
self.key_prefix,
self.key_sufix)
def save(self, document):
return self.conn.set(self.key(), document)
|
Add method save, manager create or update on opps db redis
|
Add method save, manager create or update on opps db redis
|
Python
|
mit
|
jeanmask/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps
|
---
+++
@@ -28,3 +28,6 @@
self.key_prefix,
self.key_sufix)
+ def save(self, document):
+ return self.conn.set(self.key(), document)
+
|
8ea90a83318e4c1cb01b773435ef4861a459ac0f
|
indra/sources/utils.py
|
indra/sources/utils.py
|
# -*- coding: utf-8 -*-
"""Processor for remote INDRA JSON files."""
import requests
from typing import List
from ..statements import Statement, stmts_from_json
__all__ = [
'RemoteProcessor',
]
class RemoteProcessor:
"""A processor for INDRA JSON file to be retrieved by URL.
Parameters
----------
url :
The URL of the INDRA JSON file to load
"""
#: The URL of the data
url: str
#: A list of statements
statements: List[Statement]
def __init__(self, url: str):
self.url = url
self.statements = []
def extract_statements(self) -> List[Statement]:
"""Extract statements from the remote JSON file."""
res = requests.get(self.url)
res.raise_for_status()
self.statements = stmts_from_json(res.json())
return self.statements
|
# -*- coding: utf-8 -*-
"""Processor for remote INDRA JSON files."""
from collections import Counter
import requests
from typing import List
from ..statements import Statement, stmts_from_json
__all__ = [
'RemoteProcessor',
]
class RemoteProcessor:
"""A processor for INDRA JSON file to be retrieved by URL.
Parameters
----------
url :
The URL of the INDRA JSON file to load
"""
#: The URL of the data
url: str
def __init__(self, url: str):
self.url = url
self._statements = None
@property
def statements(self) -> List[Statement]:
"""The extracted statements."""
if self._statements is None:
self.extract_statements()
return self._statements
def extract_statements(self) -> List[Statement]:
"""Extract statements from the remote JSON file."""
res = requests.get(self.url)
res.raise_for_status()
self._statements = stmts_from_json(res.json())
return self._statements
def print_summary(self) -> None:
"""print a summary of the statements."""
from tabulate import tabulate
print(tabulate(
Counter(
statement.__class__.__name__
for statement in self.statements
).most_common(),
headers=["Statement Type", "Count"],
))
|
Implement autoloading and summary function
|
Implement autoloading and summary function
|
Python
|
bsd-2-clause
|
sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,bgyori/indra,bgyori/indra,johnbachman/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra
|
---
+++
@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
"""Processor for remote INDRA JSON files."""
+
+from collections import Counter
import requests
from typing import List
@@ -24,16 +26,31 @@
#: The URL of the data
url: str
- #: A list of statements
- statements: List[Statement]
-
def __init__(self, url: str):
self.url = url
- self.statements = []
+ self._statements = None
+
+ @property
+ def statements(self) -> List[Statement]:
+ """The extracted statements."""
+ if self._statements is None:
+ self.extract_statements()
+ return self._statements
def extract_statements(self) -> List[Statement]:
"""Extract statements from the remote JSON file."""
res = requests.get(self.url)
res.raise_for_status()
- self.statements = stmts_from_json(res.json())
- return self.statements
+ self._statements = stmts_from_json(res.json())
+ return self._statements
+
+ def print_summary(self) -> None:
+ """print a summary of the statements."""
+ from tabulate import tabulate
+ print(tabulate(
+ Counter(
+ statement.__class__.__name__
+ for statement in self.statements
+ ).most_common(),
+ headers=["Statement Type", "Count"],
+ ))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.