commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
9745638bbb58b0ccef543a76d1f0677d4dda6c03 | setup.py | setup.py | from setuptools import find_packages, setup
import versioneer
setup(
name="bmipy",
version=versioneer.get_version(),
description="Basic Model Interface for Python",
author="Eric Hutton",
author_email="huttone@colorado.edu",
url="http://csdms.colorado.edu",
classifiers=[
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Scientific/Engineering :: Physics",
],
setup_requires=["setuptools"],
packages=find_packages(),
cmdclass=versioneer.get_cmdclass(),
)
| from setuptools import find_packages, setup
import versioneer
setup(
name="bmipy",
version=versioneer.get_version(),
description="Basic Model Interface for Python",
author="Eric Hutton",
author_email="huttone@colorado.edu",
url="http://csdms.colorado.edu",
classifiers=[
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Scientific/Engineering :: Physics",
],
setup_requires=["setuptools"],
install_requires=["numpy"],
packages=find_packages(),
cmdclass=versioneer.get_cmdclass(),
)
| Add numpy as an install requirement. | Add numpy as an install requirement.
| Python | mit | csdms/bmi-python | ---
+++
@@ -21,6 +21,7 @@
"Topic :: Scientific/Engineering :: Physics",
],
setup_requires=["setuptools"],
+ install_requires=["numpy"],
packages=find_packages(),
cmdclass=versioneer.get_cmdclass(),
) |
889174cf77f31b7a683085dffb109c1ca8445ae4 | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.1.2'
setup(
name='tree_extractor',
version=VERSION,
description="Lib to extract html elements by preserving ancestors and cleaning CSS",
author=u'Jurismarchés',
author_email='contact@jurismarches.com',
url='https://github.com/jurismarches/tree_extractor',
packages=[
'tree_extractor'
],
install_requires=[
'cssselect==0.9.1',
'tinycss==0.3',
'lxml==3.3.5'
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python :: 3.4',
],
test_suite='tree_extractor.tests'
)
| # -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.1.3'
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='tree_extractor',
version=VERSION,
description="Lib to extract html elements by preserving ancestors and cleaning CSS",
long_description=long_description,
author=u'Jurismarchés',
author_email='contact@jurismarches.com',
url='https://github.com/jurismarches/tree_extractor',
packages=[
'tree_extractor'
],
install_requires=[
'cssselect==0.9.1',
'tinycss==0.3',
'lxml==3.3.5'
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python :: 3.4',
],
test_suite='tree_extractor.tests'
)
| Add long description and bump version number | Add long description and bump version number
| Python | mit | jurismarches/chopper | ---
+++
@@ -1,13 +1,16 @@
# -*- coding: utf-8 -*-
from setuptools import setup
-VERSION = '0.1.2'
+VERSION = '0.1.3'
+with open('README.rst', 'r') as f:
+ long_description = f.read()
setup(
name='tree_extractor',
version=VERSION,
description="Lib to extract html elements by preserving ancestors and cleaning CSS",
+ long_description=long_description,
author=u'Jurismarchés',
author_email='contact@jurismarches.com',
url='https://github.com/jurismarches/tree_extractor', |
52f936b943471fae3f320da1eaeabd49ab0ed959 | setup.py | setup.py | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(name='django-autocert',
version='0.1.4',
packages=['autocert'],
include_package_data=True,
license='MIT',
description="Automatic SSL certificates from Let's Encrypt for Django projects",
long_description=README,
author='Patrick Farrell',
author_email='p@farrell.io',
url='https://github.com/farrepa/django-autocert/',
keywords='django ssl certificate acme',
install_requires=['acme>=0.9.3', 'Django>=1.8', 'urllib3'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
)
| import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(name='django-autocert',
version='0.1.4',
packages=['autocert'],
include_package_data=True,
license='MIT',
description="Automatic SSL certificates from Let's Encrypt for Django projects",
long_description=README,
author='Patrick Farrell',
author_email='p@farrell.io',
url='https://github.com/farrepa/django-autocert/',
keywords='django ssl certificate acme',
install_requires=['acme>=0.9.3', 'Django>=1.8', 'urllib3', 'josepy'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
)
| Add temp dependency on josepy until acme adds it | Add temp dependency on josepy until acme adds it
| Python | mit | farrepa/django-autocert | ---
+++
@@ -17,7 +17,7 @@
author_email='p@farrell.io',
url='https://github.com/farrepa/django-autocert/',
keywords='django ssl certificate acme',
- install_requires=['acme>=0.9.3', 'Django>=1.8', 'urllib3'],
+ install_requires=['acme>=0.9.3', 'Django>=1.8', 'urllib3', 'josepy'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers', |
a1b3d04d33d3b59b2829e552747f6113241fdd7a | setup.py | setup.py | from setuptools import setup
from tumblr_reader import __version__
setup(
name='django-tumblr-reader',
version=__version__,
author='Zach Snow',
author_email='z@zachsnow.com',
packages=['tumblr_reader', 'tumblr_reader.templatetags'],
url='http://zachsnow.com/projects/',
license='LICENSE.rst',
description=r"""django-tumblr-reader is a simple, reusable Django application that defines template tags for embedding your Tumblr blog in your Django website.""",
long_description=open('README.rst').read(),
)
| from setuptools import setup
from tumblr_reader import __version__
setup(
name='django-tumblr-reader',
version=__version__,
author='Zach Snow',
author_email='z@zachsnow.com',
packages=['tumblr_reader', 'tumblr_reader.templatetags'],
include_package_data=True,
url='http://zachsnow.com/projects/',
license='LICENSE.rst',
description=r"""django-tumblr-reader is a simple, reusable Django application that defines template tags for embedding your Tumblr blog in your Django website.""",
long_description=open('README.rst').read(),
)
| Make sure to actually include the stuff in the manifest. | Make sure to actually include the stuff in the manifest.
| Python | mit | zachsnow/django-tumblr-reader,zachsnow/django-tumblr-reader | ---
+++
@@ -7,6 +7,7 @@
author='Zach Snow',
author_email='z@zachsnow.com',
packages=['tumblr_reader', 'tumblr_reader.templatetags'],
+ include_package_data=True,
url='http://zachsnow.com/projects/',
license='LICENSE.rst',
description=r"""django-tumblr-reader is a simple, reusable Django application that defines template tags for embedding your Tumblr blog in your Django website.""", |
eba11d1cf17a6e240ac89edebe0e7835a6273cc4 | setup.py | setup.py | from setuptools import setup
if __name__ == "__main__":
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
description="werkzeug + twisted.web",
long_description=long_description,
setup_requires=["incremental"],
use_incremental=True,
install_requires=[
"six",
"Twisted>=13.2",
"werkzeug",
"incremental",
],
keywords="twisted flask werkzeug web",
license="MIT",
name="klein",
packages=["klein", "klein.test"],
package_dir={"": "src"},
url="https://github.com/twisted/klein",
maintainer='Amber Brown (HawkOwl)',
maintainer_email='hawkowl@twistedmatrix.com',
)
| from setuptools import setup
if __name__ == "__main__":
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
description="werkzeug + twisted.web",
long_description=long_description,
setup_requires=["incremental"],
use_incremental=True,
install_requires=[
"six",
"Twisted>=13.2",
"werkzeug",
"incremental",
],
keywords="twisted flask werkzeug web",
license="MIT",
name="klein",
packages=["klein", "klein.test"],
package_dir={"": "src"},
url="https://github.com/twisted/klein",
maintainer='Amber Brown (HawkOwl)',
maintainer_email='hawkowl@twistedmatrix.com',
)
| Remove compatibility tag for Python 2.6 | Remove compatibility tag for Python 2.6
| Python | mit | joac/klein,joac/klein | ---
+++
@@ -12,7 +12,6 @@
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
- 'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5', |
7fbee246eab2baac089c37a8949f1fd5835c9c98 | setup.py | setup.py | from setuptools import setup
setup(
name='pytest-ui',
description='Text User Interface for running python tests',
version='0.1',
license='MIT',
platforms=['linux', 'osx', 'win32'],
packages=['pytui'],
url='https://github.com/martinsmid/pytest-ui',
author_email='martin.smid@gmail.com',
author='Martin Smid',
entry_points={
'pytest11': [
'pytui = pytui.plugin',
],
'console_scripts': [
'pytui = pytui.ui:main',
]
},
install_requires=['urwid>=1.3.1', 'pytest>=3.0.5'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Utilities',
'Programming Language :: Python', ],
)
| from setuptools import setup
setup(
name='pytest-ui',
description='Text User Interface for running python tests',
version='0.1b',
license='MIT',
platforms=['linux', 'osx', 'win32'],
packages=['pytui'],
url='https://github.com/martinsmid/pytest-ui',
author_email='martin.smid@gmail.com',
author='Martin Smid',
entry_points={
'console_scripts': [
'pytui = pytui.ui:main',
]
},
install_requires=['urwid>=1.3.1', 'pytest>=3.0.5'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Utilities',
'Programming Language :: Python', ],
)
| Mark version as beta Remove pytest plugin entrypoint (not ready) | Mark version as beta
Remove pytest plugin entrypoint (not ready)
| Python | mit | martinsmid/pytest-ui | ---
+++
@@ -3,7 +3,7 @@
setup(
name='pytest-ui',
description='Text User Interface for running python tests',
- version='0.1',
+ version='0.1b',
license='MIT',
platforms=['linux', 'osx', 'win32'],
packages=['pytui'],
@@ -11,9 +11,6 @@
author_email='martin.smid@gmail.com',
author='Martin Smid',
entry_points={
- 'pytest11': [
- 'pytui = pytui.plugin',
- ],
'console_scripts': [
'pytui = pytui.ui:main',
] |
9bc84f3be08aa97e68e567e5aa78c3edd23f2b5a | setup.py | setup.py | from setuptools import setup
setup(
name = 'saml2idp',
version = '0.18',
author = 'John Samuel Anderson',
author_email = 'john@andersoninnovative.com',
description = 'SAML 2.0 IdP for Django',
long_description = 'SAML 2.0 Identity Provider app for Django projects.',
install_requires = [
'M2Crypto>=0.20.1',
'BeautifulSoup>=3.2.0',
],
license = 'MIT',
packages = ['saml2idp', 'saml2idp.tests'],
package_dir = {'saml2idp': 'idptest/saml2idp'},
package_data = {'saml2idp': ['templates/saml2idp/*.html']},
url = 'http://code.google.com/p/django-saml2-idp/',
zip_safe = True,
)
| from setuptools import setup
setup(
name = 'saml2idp',
version = '0.18',
author = 'John Samuel Anderson',
author_email = 'john@andersoninnovative.com',
description = 'SAML 2.0 IdP for Django',
long_description = 'SAML 2.0 Identity Provider app for Django projects.',
install_requires = [
'M2Crypto>=0.20.1',
'BeautifulSoup>=3.2.0',
],
license = 'MIT',
packages = ['saml2idp', 'saml2idp.tests'],
package_dir = {'saml2idp': 'idptest/saml2idp'},
package_data = {'saml2idp': ['templates/saml2idp/*.html', 'templates/saml2idp/*.xml']},
url = 'http://code.google.com/p/django-saml2-idp/',
zip_safe = True,
)
| Add XML files in template folder to package data | Add XML files in template folder to package data | Python | mit | mobify/dj-saml-idp,mobify/dj-saml-idp,mobify/dj-saml-idp | ---
+++
@@ -14,7 +14,7 @@
license = 'MIT',
packages = ['saml2idp', 'saml2idp.tests'],
package_dir = {'saml2idp': 'idptest/saml2idp'},
- package_data = {'saml2idp': ['templates/saml2idp/*.html']},
+ package_data = {'saml2idp': ['templates/saml2idp/*.html', 'templates/saml2idp/*.xml']},
url = 'http://code.google.com/p/django-saml2-idp/',
zip_safe = True,
) |
9433c1dde6f082f4f2279a3f3ef776b12bcf70c4 | setup.py | setup.py | """ Setup file for statscache """
from setuptools import setup
def get_description():
with open('README.rst', 'r') as f:
return ''.join(f.readlines()[2:])
requires = [
'fedmsg',
'fedmsg_meta_fedora_infrastructure',
'sqlalchemy',
]
tests_require = [
'nose',
'freezegun'
]
setup(
name='statscache',
version='0.0.1',
description='Daemon to build and keep fedmsg statistics',
long_description=get_description(),
author='Ralph Bean',
author_email='rbean@redhat.com',
url="https://github.com/fedora-infra/statscache/",
download_url="https://pypi.python.org/pypi/statscache/",
license='LGPLv2+',
install_requires=requires,
tests_require=tests_require,
test_suite='nose.collector',
packages=['statscache'],
include_package_data=True,
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Python Modules',
'Intended Audience :: Developers',
'Programming Language :: Python',
],
entry_points={
'moksha.consumer': [
"statscache_consumer = statscache.consumer:StatsConsumer",
],
'moksha.producer': [
"statscache_producers = statscache.producer:producers",
],
},
)
| """ Setup file for statscache """
from setuptools import setup
def get_description():
with open('README.rst', 'r') as f:
return ''.join(f.readlines()[2:])
requires = [
'fedmsg',
'moksha.hub>=1.4.6',
'fedmsg_meta_fedora_infrastructure',
'sqlalchemy',
]
tests_require = [
'nose',
'freezegun'
]
setup(
name='statscache',
version='0.0.1',
description='Daemon to build and keep fedmsg statistics',
long_description=get_description(),
author='Ralph Bean',
author_email='rbean@redhat.com',
url="https://github.com/fedora-infra/statscache/",
download_url="https://pypi.python.org/pypi/statscache/",
license='LGPLv2+',
install_requires=requires,
tests_require=tests_require,
test_suite='nose.collector',
packages=['statscache'],
include_package_data=True,
zip_safe=False,
classifiers=[
'Environment :: Web Environment',
'Topic :: Software Development :: Libraries :: Python Modules',
'Intended Audience :: Developers',
'Programming Language :: Python',
],
entry_points={
'moksha.consumer': [
"statscache_consumer = statscache.consumer:StatsConsumer",
],
'moksha.producer': [
"statscache_producers = statscache.producer:producers",
],
},
)
| Add Moksha Hub version requirement | Add Moksha Hub version requirement
New features of statscache require a moksha installation with corresponding
support, or else it won't run at all.
| Python | lgpl-2.1 | yazman/statscache,yazman/statscache,yazman/statscache | ---
+++
@@ -9,6 +9,7 @@
requires = [
'fedmsg',
+ 'moksha.hub>=1.4.6',
'fedmsg_meta_fedora_infrastructure',
'sqlalchemy',
] |
7d54a1c2934935fe2919bf053c9897e0c4f48e6a | setup.py | setup.py | #!/usr/bin/env python2.7
from distutils.core import setup
setup(
name='spreads',
version='0.1.0',
author='Johannes Baiter',
author_email='johannes.baiter@gmail.com',
packages=['spreads', 'spreadsplug'],
scripts=['spread', ],
url='http://github.com/jbaiter/spreads',
license='LICENSE.txt',
description='Tool to facilitate book digitization with the DIY Book '
'Scanner',
long_description=open('README.rst').read(),
install_requires=[
"Pillow >=2.0.0",
"clint >= 0.3.1",
"pyusb >=1.0.0a3",
],
)
| #!/usr/bin/env python2.7
from setuptools import setup, find_packages
setup(
name='spreads',
version='0.1',
author='Johannes Baiter',
author_email='johannes.baiter@gmail.com',
#packages=['spreads', 'spreadsplug'],
packages=find_packages(),
scripts=['spread', ],
url='http://github.com/jbaiter/spreads',
license='MIT',
description='Tool to facilitate book digitization with the DIY Book '
'Scanner',
long_description=open('README.rst').read(),
install_requires=[
"Pillow >=2.0.0",
"clint >= 0.3.1",
"pyusb >=1.0.0a3",
],
)
| Switch from distutils to distribute | Switch from distutils to distribute
| Python | agpl-3.0 | miloh/spreads,adongy/spreads,DIYBookScanner/spreads,gareth8118/spreads,gareth8118/spreads,gareth8118/spreads,miloh/spreads,nafraf/spreads,miloh/spreads,adongy/spreads,DIYBookScanner/spreads,DIYBookScanner/spreads,jbaiter/spreads,adongy/spreads,jbaiter/spreads,nafraf/spreads,jbaiter/spreads,nafraf/spreads | ---
+++
@@ -1,15 +1,16 @@
#!/usr/bin/env python2.7
-from distutils.core import setup
+from setuptools import setup, find_packages
setup(
name='spreads',
- version='0.1.0',
+ version='0.1',
author='Johannes Baiter',
author_email='johannes.baiter@gmail.com',
- packages=['spreads', 'spreadsplug'],
+ #packages=['spreads', 'spreadsplug'],
+ packages=find_packages(),
scripts=['spread', ],
url='http://github.com/jbaiter/spreads',
- license='LICENSE.txt',
+ license='MIT',
description='Tool to facilitate book digitization with the DIY Book '
'Scanner',
long_description=open('README.rst').read(), |
cc9bfdb3185c2fbb6c7d0f6f2cb8b36f8e5024ed | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
# Match releases to redis-py versions
__version__ = '2.7.2'
# Jenkins will replace __build__ with a unique value.
__build__ = ''
setup(name='mockredis',
version=__version__ + __build__,
description='Mock for redis-py',
url='http://www.github.com/locationlabs/mockredis',
license='Apache2',
packages=find_packages(exclude=['*.tests']),
setup_requires=[
'nose==1.2.1'
],
install_requires=[
'bintrees==1.0.1'
],
tests_require=[
'redis>=2.7.2'
],
test_suite='mockredis.tests',
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
# Match releases to redis-py versions
__version__ = '2.7.2'
# Jenkins will replace __build__ with a unique value.
__build__ = ''
setup(name='mockredis',
version=__version__ + __build__,
description='Mock for redis-py',
url='http://www.github.com/locationlabs/mockredis',
license='Apache2',
packages=find_packages(exclude=['*.tests']),
setup_requires=[
'nose==1.2.1'
],
install_requires=[
],
tests_require=[
'redis>=2.7.2'
],
test_suite='mockredis.tests',
)
| Revert use of bintrees; it's not a great fit. | Revert use of bintrees; it's not a great fit.
| Python | apache-2.0 | yossigo/mockredis,matejkloska/mockredis,path/mockredis,locationlabs/mockredis,optimizely/mockredis | ---
+++
@@ -18,7 +18,6 @@
'nose==1.2.1'
],
install_requires=[
- 'bintrees==1.0.1'
],
tests_require=[
'redis>=2.7.2' |
33eb1ea53c3f999a1134dd180bc4b1d8e38d6a80 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="ghostwriter",
version="0.0.1",
packages=find_packages(),
include_package_data=True,
install_requires=[
'flask', "six", "enum34", 'flask_login', 'flask_sqlalchemy'
],
test_suite='ghostwriter.test',
)
| from setuptools import setup, find_packages
setup(
name="ghostwriter",
version="0.0.2",
author="Arthur M",
description="a simple article/blog management tool of which *you* show how to show"
packages=find_packages(),
include_package_data=True,
install_requires=[
'flask', "six", "enum34", 'flask_login', 'flask_sqlalchemy'
],
test_suite='ghostwriter.test',
)
| Add more information to the package | Add more information to the package
| Python | mit | arthurmco/ghostwriter,arthurmco/ghostwriter | ---
+++
@@ -2,7 +2,9 @@
setup(
name="ghostwriter",
- version="0.0.1",
+ version="0.0.2",
+ author="Arthur M",
+ description="a simple article/blog management tool of which *you* show how to show"
packages=find_packages(),
include_package_data=True,
install_requires=[ |
e6d42d647bd919f178066d8278c649858a164e5e | setup.py | setup.py | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "clamped",
version = "0.1",
packages = find_packages(),
package_data = {
"clamped": ["data/*.txt"],
},
install_requires = ["clamp>=0.3"],
clamp = ["clamped"],
)
| import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "clamped",
version = "0.1",
packages = find_packages(),
install_requires = ["clamp>=0.3"],
clamp = {
"modules": ["clamped"]},
)
| Use new clamp keyword structure | Use new clamp keyword structure
| Python | apache-2.0 | jimbaker/clamped,jimbaker/clamped | ---
+++
@@ -8,9 +8,7 @@
name = "clamped",
version = "0.1",
packages = find_packages(),
- package_data = {
- "clamped": ["data/*.txt"],
- },
install_requires = ["clamp>=0.3"],
- clamp = ["clamped"],
+ clamp = {
+ "modules": ["clamped"]},
) |
19db38abd7390c69192c7ead259b821a19ec97fe | sqlviz.py | sqlviz.py | #! usr/bin/env python3
from docopt import docopt
from matplotlib import pyplot
class Schema:
"""
Wraps the SQL source code for a schema and provides methods to get information about that schema.
"""
def __init__(self, source):
"""
Creates a new instance of Schema for the specified source code string.
"""
self.source = source
def n_tables(self):
"""
Returns the number of tables defined in the schema
"""
pass
def n_keys(self):
"""
Returns the number of keys defined in the schema
"""
pass
def n_datatypes(self):
"""
Returns the number of each data type in the schema.
"""
pass
def lengths(self):
"""
Returns a dictionary mapping each data type in the schema
to a list of the lengths of those data types.
"""
pass
| #! usr/bin/env python3
from docopt import docopt
from matplotlib import pyplot
class Schema:
"""
Wraps the SQL source code for a schema and provides methods to get information about that schema.
"""
def __init__(self, source):
"""
Creates a new instance of Schema for the specified source code string.
"""
self.source = source
def n_tables(self):
"""
Returns the number of tables defined in the schema
"""
pass #TODO: not yet implementend
def n_keys(self):
"""
Returns the number of keys defined in the schema
"""
pass #TODO: not yet implementend
def n_datatypes(self):
"""
Returns the number of each data type in the schema.
"""
pass #TODO: not yet implementend
def lengths(self):
"""
Returns a dictionary mapping each data type in the schema
to a list of the lengths of those data types.
"""
pass #TODO: not yet implementend
| Add comments explaining why methods do nothing | Add comments explaining why methods do nothing | Python | mit | hawkw/sqlviz | ---
+++
@@ -12,29 +12,29 @@
"""
Creates a new instance of Schema for the specified source code string.
"""
- self.source = source
+ self.source = source
def n_tables(self):
"""
Returns the number of tables defined in the schema
"""
- pass
+ pass #TODO: not yet implementend
def n_keys(self):
"""
Returns the number of keys defined in the schema
"""
- pass
+ pass #TODO: not yet implementend
def n_datatypes(self):
"""
Returns the number of each data type in the schema.
"""
- pass
+ pass #TODO: not yet implementend
def lengths(self):
"""
Returns a dictionary mapping each data type in the schema
to a list of the lengths of those data types.
"""
- pass
+ pass #TODO: not yet implementend |
de3161d66ab0a5661d98ace04f5f0ae7c01062bf | smsgateway/utils.py | smsgateway/utils.py | import logging
logger = logging.getLogger(__name__)
def strspn(source, allowed):
newchrs = []
for c in source:
if c in allowed:
newchrs.append(c)
return u''.join(newchrs)
def check_cell_phone_number(number):
cleaned_number = strspn(number, u'+0123456789')
if not u'+' in cleaned_number[:1]:
cleaned_number = u'+%s' % cleaned_number
return cleaned_number
def truncate_sms(text, max_length=160):
if len(text) <= max_length:
return text
else:
logger.error("Trying to send an SMS that is too long: %s", text)
return text[:max_length-3] + '...'
def parse_sms(content):
content = content.upper().strip()
from smsgateway.backends.base import hook
for keyword, subkeywords in hook.iteritems():
if content[:len(keyword)] == unicode(keyword):
remainder = content[len(keyword):].strip()
if '*' in subkeywords:
parts = remainder.split(u' ')
subkeyword = parts[0].strip()
if subkeyword in subkeywords:
return [keyword] + parts
return keyword, remainder
else:
for subkeyword in subkeywords:
if remainder[:len(subkeyword)] == unicode(subkeyword):
subremainder = remainder[len(subkeyword):].strip()
return [keyword, subkeyword] + subremainder.split()
return None
| import logging
logger = logging.getLogger(__name__)
def strspn(source, allowed):
newchrs = []
for c in source:
if c in allowed:
newchrs.append(c)
return u''.join(newchrs)
def check_cell_phone_number(number):
cleaned_number = strspn(number, u'0123456789')
#if not u'+' in cleaned_number[:1]:
# cleaned_number = u'+%s' % cleaned_number
return int(cleaned_number)
def truncate_sms(text, max_length=160):
if len(text) <= max_length:
return text
else:
logger.error("Trying to send an SMS that is too long: %s", text)
return text[:max_length-3] + '...'
def parse_sms(content):
content = content.upper().strip()
from smsgateway.backends.base import hook
for keyword, subkeywords in hook.iteritems():
if content[:len(keyword)] == unicode(keyword):
remainder = content[len(keyword):].strip()
if '*' in subkeywords:
parts = remainder.split(u' ')
subkeyword = parts[0].strip()
if subkeyword in subkeywords:
return [keyword] + parts
return keyword, remainder
else:
for subkeyword in subkeywords:
if remainder[:len(subkeyword)] == unicode(subkeyword):
subremainder = remainder[len(subkeyword):].strip()
return [keyword, subkeyword] + subremainder.split()
return None
| Use international MSISDN format according to SMPP protocol spec: 4.2.6.1.1 | Use international MSISDN format
according to SMPP protocol spec: 4.2.6.1.1
| Python | bsd-3-clause | peterayeni/django-smsgateway,peterayeni/django-smsgateway,mvpoland/django-smsgateway,mvpoland/django-smsgateway,peterayeni/django-smsgateway,mvpoland/django-smsgateway,peterayeni/django-smsgateway | ---
+++
@@ -9,12 +9,10 @@
return u''.join(newchrs)
def check_cell_phone_number(number):
- cleaned_number = strspn(number, u'+0123456789')
-
- if not u'+' in cleaned_number[:1]:
- cleaned_number = u'+%s' % cleaned_number
-
- return cleaned_number
+ cleaned_number = strspn(number, u'0123456789')
+ #if not u'+' in cleaned_number[:1]:
+ # cleaned_number = u'+%s' % cleaned_number
+ return int(cleaned_number)
def truncate_sms(text, max_length=160):
if len(text) <= max_length: |
22888f6731cf7e6ab0a6cb14088075cf7061d310 | sympy/interactive/ipythonprinting.py | sympy/interactive/ipythonprinting.py | """
A print function that pretty prints SymPy objects.
:moduleauthor: Brian Granger
Usage
=====
To use this extension, execute:
%load_ext sympy.interactive.ipythonprinting
Once the extension is loaded, SymPy Basic objects are automatically
pretty-printed in the terminal and rendered in LaTeX in the Qt console and
notebook.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from sympy.interactive.printing import init_printing
#-----------------------------------------------------------------------------
# Definitions of special display functions for use with IPython
#-----------------------------------------------------------------------------
_loaded = False
def load_ipython_extension(ip):
"""Load the extension in IPython."""
import IPython
global _loaded
# Use extension manager to track loaded status if available
# This is currently in IPython 0.14.dev
if hasattr(ip.extension_manager, 'loaded'):
loaded = 'sympy.interactive.ipythonprinting' in ip.extension_manager.loaded
else:
loaded = _loaded
if not loaded:
init_printing(ip=ip)
_loaded = True
| """
A print function that pretty prints SymPy objects.
:moduleauthor: Brian Granger
Usage
=====
To use this extension, execute:
%load_ext sympy.interactive.ipythonprinting
Once the extension is loaded, SymPy Basic objects are automatically
pretty-printed in the terminal and rendered in LaTeX in the Qt console and
notebook.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from sympy.interactive.printing import init_printing
#-----------------------------------------------------------------------------
# Definitions of special display functions for use with IPython
#-----------------------------------------------------------------------------
def load_ipython_extension(ip):
"""Load the extension in IPython."""
init_printing(ip=ip)
| Remove checks that module is loaded | Remove checks that module is loaded
| Python | bsd-3-clause | wanglongqi/sympy,meghana1995/sympy,skirpichev/omg,ga7g08/sympy,jamesblunt/sympy,Shaswat27/sympy,amitjamadagni/sympy,saurabhjn76/sympy,shikil/sympy,ga7g08/sympy,kevalds51/sympy,maniteja123/sympy,sampadsaha5/sympy,postvakje/sympy,debugger22/sympy,emon10005/sympy,farhaanbukhsh/sympy,diofant/diofant,sunny94/temp,souravsingh/sympy,Designist/sympy,liangjiaxing/sympy,dqnykamp/sympy,Vishluck/sympy,yashsharan/sympy,vipulroxx/sympy,skidzo/sympy,jbbskinny/sympy,farhaanbukhsh/sympy,souravsingh/sympy,AunShiLord/sympy,Titan-C/sympy,Curious72/sympy,garvitr/sympy,chaffra/sympy,shipci/sympy,ChristinaZografou/sympy,mcdaniel67/sympy,pandeyadarsh/sympy,sahilshekhawat/sympy,Sumith1896/sympy,aktech/sympy,liangjiaxing/sympy,kumarkrishna/sympy,Davidjohnwilson/sympy,meghana1995/sympy,mcdaniel67/sympy,toolforger/sympy,farhaanbukhsh/sympy,yashsharan/sympy,dqnykamp/sympy,toolforger/sympy,Mitchkoens/sympy,ChristinaZografou/sympy,Curious72/sympy,madan96/sympy,beni55/sympy,aktech/sympy,jamesblunt/sympy,sahmed95/sympy,ahhda/sympy,oliverlee/sympy,rahuldan/sympy,atsao72/sympy,kaushik94/sympy,Vishluck/sympy,Vishluck/sympy,atreyv/sympy,kaushik94/sympy,Shaswat27/sympy,abloomston/sympy,postvakje/sympy,shipci/sympy,dqnykamp/sympy,madan96/sympy,iamutkarshtiwari/sympy,wanglongqi/sympy,drufat/sympy,asm666/sympy,jamesblunt/sympy,Sumith1896/sympy,pandeyadarsh/sympy,mafiya69/sympy,Davidjohnwilson/sympy,Davidjohnwilson/sympy,skidzo/sympy,VaibhavAgarwalVA/sympy,Arafatk/sympy,grevutiu-gabriel/sympy,pbrady/sympy,MridulS/sympy,ahhda/sympy,jbbskinny/sympy,yukoba/sympy,aktech/sympy,Designist/sympy,kmacinnis/sympy,abhiii5459/sympy,cccfran/sympy,mafiya69/sympy,kmacinnis/sympy,AunShiLord/sympy,sahilshekhawat/sympy,pbrady/sympy,MridulS/sympy,vipulroxx/sympy,abhiii5459/sympy,Arafatk/sympy,abloomston/sympy,lindsayad/sympy,saurabhjn76/sympy,MridulS/sympy,liangjiaxing/sympy,pbrady/sympy,Curious72/sympy,mcdaniel67/sympy,lidavidm/sympy,hrashk/sympy,atreyv/sympy,VaibhavAgarwalVA/sympy,kumarkrishna/symp
y,hrashk/sympy,jaimahajan1997/sympy,abloomston/sympy,hargup/sympy,beni55/sympy,bukzor/sympy,iamutkarshtiwari/sympy,Titan-C/sympy,MechCoder/sympy,cswiercz/sympy,mafiya69/sympy,Sumith1896/sympy,MechCoder/sympy,toolforger/sympy,Shaswat27/sympy,kaushik94/sympy,jbbskinny/sympy,Titan-C/sympy,jaimahajan1997/sympy,atsao72/sympy,lidavidm/sympy,postvakje/sympy,lidavidm/sympy,Mitchkoens/sympy,hargup/sympy,grevutiu-gabriel/sympy,MechCoder/sympy,abhiii5459/sympy,shikil/sympy,AunShiLord/sympy,kevalds51/sympy,meghana1995/sympy,sahmed95/sympy,sahilshekhawat/sympy,Designist/sympy,atsao72/sympy,asm666/sympy,sampadsaha5/sympy,garvitr/sympy,sampadsaha5/sympy,bukzor/sympy,oliverlee/sympy,AkademieOlympia/sympy,ahhda/sympy,jerli/sympy,sunny94/temp,grevutiu-gabriel/sympy,sahmed95/sympy,Gadal/sympy,yukoba/sympy,chaffra/sympy,wyom/sympy,drufat/sympy,oliverlee/sympy,iamutkarshtiwari/sympy,kaichogami/sympy,cswiercz/sympy,asm666/sympy,cccfran/sympy,kevalds51/sympy,hargup/sympy,kumarkrishna/sympy,amitjamadagni/sympy,lindsayad/sympy,debugger22/sympy,chaffra/sympy,cccfran/sympy,wyom/sympy,AkademieOlympia/sympy,Gadal/sympy,AkademieOlympia/sympy,ga7g08/sympy,yashsharan/sympy,kaichogami/sympy,atreyv/sympy,jerli/sympy,kaichogami/sympy,moble/sympy,skidzo/sympy,souravsingh/sympy,Arafatk/sympy,emon10005/sympy,lindsayad/sympy,yukoba/sympy,sunny94/temp,Gadal/sympy,moble/sympy,maniteja123/sympy,jerli/sympy,debugger22/sympy,maniteja123/sympy,wyom/sympy,kmacinnis/sympy,emon10005/sympy,jaimahajan1997/sympy,madan96/sympy,vipulroxx/sympy,saurabhjn76/sympy,ChristinaZografou/sympy,wanglongqi/sympy,shikil/sympy,beni55/sympy,rahuldan/sympy,bukzor/sympy,Mitchkoens/sympy,VaibhavAgarwalVA/sympy,garvitr/sympy,moble/sympy,hrashk/sympy,drufat/sympy,cswiercz/sympy,rahuldan/sympy,pandeyadarsh/sympy,shipci/sympy | ---
+++
@@ -32,21 +32,6 @@
# Definitions of special display functions for use with IPython
#-----------------------------------------------------------------------------
-_loaded = False
-
-
def load_ipython_extension(ip):
"""Load the extension in IPython."""
- import IPython
-
- global _loaded
- # Use extension manager to track loaded status if available
- # This is currently in IPython 0.14.dev
- if hasattr(ip.extension_manager, 'loaded'):
- loaded = 'sympy.interactive.ipythonprinting' in ip.extension_manager.loaded
- else:
- loaded = _loaded
-
- if not loaded:
- init_printing(ip=ip)
- _loaded = True
+ init_printing(ip=ip) |
10a5f15b1a7703179edac113dcdadd6042fb29f8 | txircd/modules/cmode_s.py | txircd/modules/cmode_s.py | from twisted.words.protocols import irc
from txircd.modbase import Mode
class SecretMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "NAMES":
return data
remove = []
for chan in data["targetchan"]:
if "s" in chan.mode and chan.name not in user.channels:
user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
remove.append(chan)
for chan in remove:
data["targetchan"].remove(chan)
return data
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | from twisted.words.protocols import irc
from txircd.modbase import Mode
class SecretMode(Mode):
def checkPermission(self, user, cmd, data):
if cmd != "NAMES":
return data
remove = []
for chan in data["targetchan"]:
if "s" in chan.mode and chan.name not in user.channels:
user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
remove.append(chan)
for chan in remove:
data["targetchan"].remove(chan)
return data
def listOutput(self, command, data):
if command != "LIST":
return data
if "cdata" not in data:
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | Fix LIST command crashing (again) on certain input | Fix LIST command crashing (again) on certain input
| Python | bsd-3-clause | DesertBus/txircd,Heufneutje/txircd,ElementalAlchemist/txircd | ---
+++
@@ -16,6 +16,8 @@
def listOutput(self, command, data):
if command != "LIST":
+ return data
+ if "cdata" not in data:
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels: |
857350154e11f09a1b4aeecd411ab41df2acf378 | tests/integration/test_crossmodel.py | tests/integration/test_crossmodel.py | import pytest
from .. import base
@base.bootstrapped
@pytest.mark.asyncio
async def test_offer(event_loop):
async with base.CleanModel() as model:
application = await model.deploy(
'cs:~jameinel/ubuntu-lite-7',
application_name='ubuntu',
series='bionic',
channel='stable',
)
assert 'ubuntu' in model.applications
await model.block_until(
lambda: all(unit.workload_status == 'active'
for unit in application.units))
await model.create_offer("ubuntu:ubuntu")
offers = await model.list_offers()
await model.block_until(
lambda: all(offer.application_name == 'ubuntu'
for offer in offers))
await model.remove_offer("ubuntu", force=True)
| import pytest
from .. import base
@base.bootstrapped
@pytest.mark.asyncio
async def test_offer(event_loop):
async with base.CleanModel() as model:
application = await model.deploy(
'cs:~jameinel/ubuntu-lite-7',
application_name='ubuntu',
series='bionic',
channel='stable',
)
assert 'ubuntu' in model.applications
await model.block_until(
lambda: all(unit.workload_status == 'active'
for unit in application.units))
await model.create_offer("ubuntu:ubuntu")
offers = await model.list_offers()
await model.block_until(
lambda: all(offer.application_name == 'ubuntu'
for offer in offers))
await model.remove_offer("admin/{}.ubuntu".format(model.info.name), force=True)
| Remove offer with model name | Remove offer with model name
| Python | apache-2.0 | juju/python-libjuju,juju/python-libjuju | ---
+++
@@ -23,4 +23,4 @@
await model.block_until(
lambda: all(offer.application_name == 'ubuntu'
for offer in offers))
- await model.remove_offer("ubuntu", force=True)
+ await model.remove_offer("admin/{}.ubuntu".format(model.info.name), force=True) |
6715e42b5a3e0b8b9caea853a073d1aac0495885 | phplint.py | phplint.py | import subprocess
import os
class PHPLint:
def __init__(self):
self.silent = False
def setSilentLint(self, isSilent):
self.silent = isSilent
def lint(self, path):
if os.path.isfile(path):
self.lintFile(path)
elif os.path.isdir(path):
self.lintDir(path)
def lintFile(self, path):
if self.isPHPFile(path):
process = subprocess.Popen(['php', '-l', path], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(processStdOut, processStdErr) = process.communicate();
if not self.isSilentLint():
print processStdOut.rstrip()
if process.returncode > 0:
print processStdErr.rstrip()
raise SystemExit(1)
def lintDir(self, path):
for rootDir, dirName, files in os.walk(path):
for f in files:
self.lintFile(os.path.join(rootDir, f))
def isPHPFile(self, filename):
return filename.endswith('.php')
def isSilentLint(self):
return self.silent
| import subprocess
import os
class PHPLint:
def __init__(self):
self.silent = False
def set_silent_lint(self, is_silent):
self.silent = is_silent
def lint(self, path):
if os.path.isfile(path):
self.lint_file(path)
elif os.path.isdir(path):
self.lint_dir(path)
def lint_file(self, path):
if self.is_php_file(path):
process = subprocess.Popen(['php', '-l', path], shell=False,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(process_stdout, process_stderr) = process.communicate();
if not self.is_silent_lint():
print process_stdout.rstrip()
if process.returncode > 0:
print process_stderr.rstrip()
raise SystemExit(1)
def lint_dir(self, path):
for root_dir, dir_name, files in os.walk(path):
for f in files:
self.lint_file(os.path.join(root_dir, f))
def is_php_file(self, filename):
return filename.endswith('.php')
def is_silent_lint(self):
return self.silent
| Change code to be compliant with PEP 8 | Change code to be compliant with PEP 8
This change camel cased functions for underscored function names and
properties.
| Python | mit | nelsonsar/phplinter | ---
+++
@@ -5,33 +5,34 @@
def __init__(self):
self.silent = False
- def setSilentLint(self, isSilent):
- self.silent = isSilent
+ def set_silent_lint(self, is_silent):
+ self.silent = is_silent
def lint(self, path):
if os.path.isfile(path):
- self.lintFile(path)
+ self.lint_file(path)
elif os.path.isdir(path):
- self.lintDir(path)
+ self.lint_dir(path)
- def lintFile(self, path):
- if self.isPHPFile(path):
- process = subprocess.Popen(['php', '-l', path], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- (processStdOut, processStdErr) = process.communicate();
- if not self.isSilentLint():
- print processStdOut.rstrip()
+ def lint_file(self, path):
+ if self.is_php_file(path):
+ process = subprocess.Popen(['php', '-l', path], shell=False,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (process_stdout, process_stderr) = process.communicate();
+ if not self.is_silent_lint():
+ print process_stdout.rstrip()
if process.returncode > 0:
- print processStdErr.rstrip()
+ print process_stderr.rstrip()
raise SystemExit(1)
- def lintDir(self, path):
- for rootDir, dirName, files in os.walk(path):
+ def lint_dir(self, path):
+ for root_dir, dir_name, files in os.walk(path):
for f in files:
- self.lintFile(os.path.join(rootDir, f))
+ self.lint_file(os.path.join(root_dir, f))
- def isPHPFile(self, filename):
+ def is_php_file(self, filename):
return filename.endswith('.php')
- def isSilentLint(self):
+ def is_silent_lint(self):
return self.silent
|
e2bc8b6010e979a9c00851d21ee783c8e8e27a55 | adaptive/typecheck.py | adaptive/typecheck.py | # Tools for type checking
def assertListOf(lst, typ):
assert isinstance(lst, list), lst
for idx, value in enumerate(lst):
#assert isinstance(value, typ), (idx, value)
assert value is None or isinstance(value, typ), (idx, value)
return True
| # Tools for type checking
def assertListOf(lst, typ, orNone=True):
assert isinstance(lst, list), lst
if orNone:
for idx, value in enumerate(lst):
assert value is None or isinstance(value, typ), (idx, value)
else:
for idx, value in enumerate(lst):
assert isinstance(value, typ), (idx, value)
return True
def emitTypeCheck(out, name, typ, orNone=True):
d = dict(name=name, typ=typ.py_name)
if typ.name == "void":
out("assert %(name)s is None, %(name)s" % d)
elif typ.parameters:
assert len(typ.parameters) == 1, "Unimplemented: %s" % typ
assert typ.name == "List", "Unimplemented: %s" % typ
d["param"] = typ.parameters[0].py_name
if orNone:
out("assert %(name)s is None or _assertListOf(%(name)s, %(param)s), %(name)s" % d)
else:
out("_assertListOf(%(name)s, %(param)s), %(name)s" % d)
else:
if orNone:
out("assert %(name)s is None or isinstance(%(name)s, %(typ)s), %(name)s" % d)
else:
out("assert isinstance(%(name)s, %(typ)s), %(name)s" % d)
| Add type check helper; make typ or None optional for lists | Add type check helper; make typ or None optional for lists | Python | apache-2.0 | datawire/adaptive | ---
+++
@@ -1,8 +1,29 @@
# Tools for type checking
-def assertListOf(lst, typ):
+def assertListOf(lst, typ, orNone=True):
assert isinstance(lst, list), lst
- for idx, value in enumerate(lst):
- #assert isinstance(value, typ), (idx, value)
- assert value is None or isinstance(value, typ), (idx, value)
+ if orNone:
+ for idx, value in enumerate(lst):
+ assert value is None or isinstance(value, typ), (idx, value)
+ else:
+ for idx, value in enumerate(lst):
+ assert isinstance(value, typ), (idx, value)
return True
+
+def emitTypeCheck(out, name, typ, orNone=True):
+ d = dict(name=name, typ=typ.py_name)
+ if typ.name == "void":
+ out("assert %(name)s is None, %(name)s" % d)
+ elif typ.parameters:
+ assert len(typ.parameters) == 1, "Unimplemented: %s" % typ
+ assert typ.name == "List", "Unimplemented: %s" % typ
+ d["param"] = typ.parameters[0].py_name
+ if orNone:
+ out("assert %(name)s is None or _assertListOf(%(name)s, %(param)s), %(name)s" % d)
+ else:
+ out("_assertListOf(%(name)s, %(param)s), %(name)s" % d)
+ else:
+ if orNone:
+ out("assert %(name)s is None or isinstance(%(name)s, %(typ)s), %(name)s" % d)
+ else:
+ out("assert isinstance(%(name)s, %(typ)s), %(name)s" % d) |
ae78e44461ec710c65479b094dcff257944e1f83 | pyof/v0x01/controller2switch/stats_request.py | pyof/v0x01/controller2switch/stats_request.py | """Query the datapath about its current state."""
# System imports
# Third-party imports
from pyof.foundation.base import GenericMessage
from pyof.foundation.basic_types import ConstantTypeList, UBInt16
# Local imports
from pyof.v0x01.common.header import Header, Type
from pyof.v0x01.controller2switch.common import StatsTypes
__all__ = ('StatsRequest',)
class StatsRequest(GenericMessage):
"""Response to the config request."""
#: OpenFlow :class:`.Header`
header = Header(message_type=Type.OFPT_STATS_REQUEST)
body_type = UBInt16(enum_ref=StatsTypes)
flags = UBInt16()
body = ConstantTypeList()
def __init__(self, xid=None, body_type=None, flags=None, body=None):
"""The constructor just assings parameters to object attributes.
Args:
body_type (StatsTypes): One of the OFPST_* constants.
flags (int): OFPSF_REQ_* flags (none yet defined).
body (ConstantTypeList): Body of the request.
"""
super().__init__(xid)
self.body_type = body_type
self.flags = flags
self.body = [] if body is None else body
| """Query the datapath about its current state."""
# System imports
# Third-party imports
from pyof.foundation.base import GenericMessage
from pyof.foundation.basic_types import BinaryData, UBInt16
# Local imports
from pyof.v0x01.common.header import Header, Type
from pyof.v0x01.controller2switch.common import StatsTypes
__all__ = ('StatsRequest',)
class StatsRequest(GenericMessage):
"""Response to the config request."""
#: OpenFlow :class:`.Header`
header = Header(message_type=Type.OFPT_STATS_REQUEST)
body_type = UBInt16(enum_ref=StatsTypes)
flags = UBInt16()
body = BinaryData()
def __init__(self, xid=None, body_type=None, flags=0, body=b''):
"""The constructor just assings parameters to object attributes.
Args:
body_type (StatsTypes): One of the OFPST_* constants.
flags (int): OFPSF_REQ_* flags (none yet defined).
body (ConstantTypeList): Body of the request.
"""
super().__init__(xid)
self.body_type = body_type
self.flags = flags
self.body = body
| Fix StatsRequest body type; add default values | Fix StatsRequest body type; add default values
| Python | mit | cemsbr/python-openflow,kytos/python-openflow | ---
+++
@@ -5,7 +5,7 @@
# Third-party imports
from pyof.foundation.base import GenericMessage
-from pyof.foundation.basic_types import ConstantTypeList, UBInt16
+from pyof.foundation.basic_types import BinaryData, UBInt16
# Local imports
from pyof.v0x01.common.header import Header, Type
from pyof.v0x01.controller2switch.common import StatsTypes
@@ -20,9 +20,9 @@
header = Header(message_type=Type.OFPT_STATS_REQUEST)
body_type = UBInt16(enum_ref=StatsTypes)
flags = UBInt16()
- body = ConstantTypeList()
+ body = BinaryData()
- def __init__(self, xid=None, body_type=None, flags=None, body=None):
+ def __init__(self, xid=None, body_type=None, flags=0, body=b''):
"""The constructor just assings parameters to object attributes.
Args:
@@ -33,4 +33,4 @@
super().__init__(xid)
self.body_type = body_type
self.flags = flags
- self.body = [] if body is None else body
+ self.body = body |
490f2b1f31fb1ec99bdd09db0e3806fc070ea32a | daemon/__init__.py | daemon/__init__.py | # -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP [no number yet], Standard daemon
process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. An
instance of the `DaemonContext` holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext() as daemon_context:
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.4"
| # -*- coding: utf-8 -*-
# Copyright © 2009 Ben Finney <ben+python@benfinney.id.au>
# Copyright © 2006 Robert Niederreiter
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Library to implement a well-behaved Unix daemon process
This library implements PEP 3143: Standard daemon process library.
A well-behaved Unix daemon process is tricky to get right, but the
required steps are much the same for every daemon program. A
`DaemonContext` instance holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
Simple example of usage::
import daemon
from spam import do_main_program
with daemon.DaemonContext():
do_main_program()
"""
from daemon import DaemonContext
version = "1.4.4"
| Update package docstring for PEP number. | Update package docstring for PEP number. | Python | apache-2.0 | wting/python-daemon,eaufavor/python-daemon | ---
+++
@@ -10,12 +10,11 @@
""" Library to implement a well-behaved Unix daemon process
- This library implements PEP [no number yet], Standard daemon
- process library.
+ This library implements PEP 3143: Standard daemon process library.
A well-behaved Unix daemon process is tricky to get right, but the
- required steps are much the same for every daemon program. An
- instance of the `DaemonContext` holds the behaviour and configured
+ required steps are much the same for every daemon program. A
+ `DaemonContext` instance holds the behaviour and configured
process environment for the program; use the instance as a context
manager to enter a daemon state.
@@ -25,7 +24,7 @@
from spam import do_main_program
- with daemon.DaemonContext() as daemon_context:
+ with daemon.DaemonContext():
do_main_program()
""" |
1c8674029a1f1fa4a8f9545f540402ba418bd693 | docs/generate_documentation.py | docs/generate_documentation.py | # -*- coding: utf-8 -*-
#!/usr/bin/python
import os
from subprocess import Popen, PIPE
import shutil
docs_folder_path = os.path.join(os.path.dirname(__file__))
p1 = Popen(u'sphinx-build -M html {src} {dst}'.format(
src=docs_folder_path, dst=os.path.join(docs_folder_path, u'_build')
), shell=True, stdin=PIPE, stdout=PIPE)
result = p1.communicate()[0]
if not result or u'build succeeded' not in result:
raise RuntimeError(u'sphinx-build failed')
final_folder_path = os.path.join(docs_folder_path, u'documentation')
shutil.rmtree(final_folder_path)
shutil.copytree(os.path.join(docs_folder_path, u'_build', u'html') , final_folder_path)
| # -*- coding: utf-8 -*-
#!/usr/bin/python3
import os
from subprocess import Popen, PIPE
import shutil
docs_folder_path = os.path.abspath(os.path.dirname(__file__))
p1 = Popen('python -m sphinx -v -b html {src} {dst}'.format(
src=docs_folder_path, dst=os.path.join(docs_folder_path, '_build')
), shell=True, stdin=PIPE, stdout=PIPE)
result = p1.communicate()[0].decode('utf-8')
if not result or 'build succeeded' not in result:
raise RuntimeError('sphinx-build failed')
final_folder_path = os.path.join(docs_folder_path, 'documentation')
if os.path.isdir(final_folder_path):
shutil.rmtree(final_folder_path)
shutil.copytree(os.path.join(docs_folder_path, '_build') , final_folder_path)
| Fix documentation generating script to work with Python 3 | Fix documentation generating script to work with Python 3
| Python | agpl-3.0 | nabla-c0d3/sslyze | ---
+++
@@ -1,19 +1,20 @@
# -*- coding: utf-8 -*-
-#!/usr/bin/python
+#!/usr/bin/python3
import os
from subprocess import Popen, PIPE
import shutil
-docs_folder_path = os.path.join(os.path.dirname(__file__))
-p1 = Popen(u'sphinx-build -M html {src} {dst}'.format(
- src=docs_folder_path, dst=os.path.join(docs_folder_path, u'_build')
+docs_folder_path = os.path.abspath(os.path.dirname(__file__))
+p1 = Popen('python -m sphinx -v -b html {src} {dst}'.format(
+ src=docs_folder_path, dst=os.path.join(docs_folder_path, '_build')
), shell=True, stdin=PIPE, stdout=PIPE)
-result = p1.communicate()[0]
-if not result or u'build succeeded' not in result:
- raise RuntimeError(u'sphinx-build failed')
+result = p1.communicate()[0].decode('utf-8')
+if not result or 'build succeeded' not in result:
+ raise RuntimeError('sphinx-build failed')
-final_folder_path = os.path.join(docs_folder_path, u'documentation')
-shutil.rmtree(final_folder_path)
-shutil.copytree(os.path.join(docs_folder_path, u'_build', u'html') , final_folder_path)
+final_folder_path = os.path.join(docs_folder_path, 'documentation')
+if os.path.isdir(final_folder_path):
+ shutil.rmtree(final_folder_path)
+shutil.copytree(os.path.join(docs_folder_path, '_build') , final_folder_path) |
ceee44182b24ecdc0563a9e9a6841993d1978d0c | setup.py | setup.py | from distutils.core import setup
setup(
name='aJohnShots',
version="1.0.0",
description='Python module/library for saving Security Hash Algorithms into JSON format.',
author='funilrys',
author_email='contact@funilrys.com',
license='GPL-3.0 https://opensource.org/licenses/GPL-3.0',
url='https://github.com/funilrys/A-John-Shots',
platforms=['any'],
packages=['a_john_shots'],
keywords=['Python', 'JSON', 'SHA 1',
'SHA-512', 'SHA-224', 'SHA-384', 'SHA'],
classifiers=[
'Environment :: Console',
'Topic :: Software Development',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)'
],
)
'''
test_suite='testsuite',
entry_points="""
[console_scripts]
cmd = package:main
""",
'''
| from distutils.core import setup
setup(
name='a_john_shots',
version="1.0.0",
description='Python module/library for saving Security Hash Algorithms into JSON format.',
long_description=open('README').read(),
author='funilrys',
author_email='contact@funilrys.com',
license='GPL-3.0 https://opensource.org/licenses/GPL-3.0',
url='https://github.com/funilrys/A-John-Shots',
platforms=['any'],
packages=['a_john_shots'],
keywords=['Python', 'JSON', 'SHA-1',
'SHA-512', 'SHA-224', 'SHA-384', 'SHA', 'MD5'],
classifiers=[
'Environment :: Console',
'Topic :: Software Development',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)'
],
)
'''
test_suite='testsuite',
entry_points="""
[console_scripts]
cmd = package:main
""",
'''
| Rename + add long_description + update keywords | Rename + add long_description + update keywords
| Python | mit | funilrys/A-John-Shots | ---
+++
@@ -1,17 +1,18 @@
from distutils.core import setup
setup(
- name='aJohnShots',
+ name='a_john_shots',
version="1.0.0",
description='Python module/library for saving Security Hash Algorithms into JSON format.',
+ long_description=open('README').read(),
author='funilrys',
author_email='contact@funilrys.com',
license='GPL-3.0 https://opensource.org/licenses/GPL-3.0',
url='https://github.com/funilrys/A-John-Shots',
platforms=['any'],
packages=['a_john_shots'],
- keywords=['Python', 'JSON', 'SHA 1',
- 'SHA-512', 'SHA-224', 'SHA-384', 'SHA'],
+ keywords=['Python', 'JSON', 'SHA-1',
+ 'SHA-512', 'SHA-224', 'SHA-384', 'SHA', 'MD5'],
classifiers=[
'Environment :: Console',
'Topic :: Software Development', |
b2b691c1224e04d873c2673fb2d2734a92e88600 | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name = "OpenFisca-Country-Template",
version = "3.7.0",
author = "OpenFisca Team",
author_email = "contact@openfisca.org",
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Information Analysis",
],
description = "OpenFisca tax and benefit system for Country-Template",
keywords = "benefit microsimulation social tax",
license ="http://www.fsf.org/licensing/licenses/agpl-3.0.html",
url = "https://github.com/openfisca/country-template",
include_package_data = True, # Will read MANIFEST.in
data_files = [
("share/openfisca/openfisca-country-template", ["CHANGELOG.md", "LICENSE", "README.md"]),
],
install_requires = [
"OpenFisca-Core[web-api] >= 26.0, < 27.0",
],
extras_require = {
"dev": [
"autopep8 == 1.4.0",
"flake8 >= 3.5.0, < 3.6.0",
"flake8-print",
"pycodestyle >= 2.3.0, < 2.4.0", # To avoid incompatibility with flake
]
},
packages=find_packages(),
)
| # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name = "OpenFisca-Country-Template",
version = "3.7.0",
author = "OpenFisca Team",
author_email = "contact@openfisca.org",
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Information Analysis",
],
description = "OpenFisca tax and benefit system for Country-Template",
keywords = "benefit microsimulation social tax",
license ="http://www.fsf.org/licensing/licenses/agpl-3.0.html",
url = "https://github.com/openfisca/country-template",
include_package_data = True, # Will read MANIFEST.in
data_files = [
("share/openfisca/openfisca-country-template", ["CHANGELOG.md", "LICENSE", "README.md"]),
],
install_requires = [
"OpenFisca-Core[web-api] >= 27.0, < 28.0",
],
extras_require = {
"dev": [
"autopep8 == 1.4.0",
"flake8 >= 3.5.0, < 3.6.0",
"flake8-print",
"pycodestyle >= 2.3.0, < 2.4.0", # To avoid incompatibility with flake
]
},
packages=find_packages(),
)
| Update Core dependency to v27 | Update Core dependency to v27
| Python | agpl-3.0 | openfisca/country-template,openfisca/country-template | ---
+++
@@ -23,7 +23,7 @@
("share/openfisca/openfisca-country-template", ["CHANGELOG.md", "LICENSE", "README.md"]),
],
install_requires = [
- "OpenFisca-Core[web-api] >= 26.0, < 27.0",
+ "OpenFisca-Core[web-api] >= 27.0, < 28.0",
],
extras_require = {
"dev": [ |
addc581df4fd58522329ca6febe6bd82e77938a5 | setup.py | setup.py | from setuptools import setup
import sys
# The argparse module was introduced in python 2.7 or python 3.2
REQUIRES = ["argparse"] if sys.version[:3] in ('2.6', '3.0', '3.1') else []
REQUIRES = REQUIRES + ["wand>=0.4.0"]
setup(
varsion='0.0.1',
name="anki-slides-import",
author="Utkarsh Upadhyay",
author_email="musically.ut@gmail.com",
description = "Convert text notes and slides into an Anki deck.",
license="MIT",
keywords="anki slides deck import",
install_requires=REQUIRES,
url="https://github.com/musically-ut/anki-slides-import",
packages=["slidesimport"],
entry_points={ "console_script": [ "slides2anki = slidesimport.run" ]},
classifiers = [
"License :: OSI Approved :: MIT License",
"Intended Audience :: Science/Research",
"Development Status :: 3 - Alpha",
"Operating System :: OS Independent",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Natural Language :: English"
],
)
| from setuptools import setup
import sys
# The argparse module was introduced in python 2.7 or python 3.2
REQUIRES = ["argparse"] if sys.version[:3] in ('2.6', '3.0', '3.1') else []
REQUIRES = REQUIRES + ["wand>=0.4.0"]
setup(
version='0.0.1',
name="anki-slides-import",
author="Utkarsh Upadhyay",
author_email="musically.ut@gmail.com",
description = "Convert text notes and slides into an Anki deck.",
license="MIT",
keywords="anki slides deck import",
install_requires=REQUIRES,
url="https://github.com/musically-ut/anki-slides-import",
packages=["slidesimport"],
entry_points={ "console_scripts": [ "slides2anki = slidesimport.slidesimport:run" ]},
classifiers = [
"License :: OSI Approved :: MIT License",
"Intended Audience :: Science/Research",
"Development Status :: 3 - Alpha",
"Operating System :: OS Independent",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Natural Language :: English"
],
)
| Fix format of console_scripts and spelling of version | Fix format of console_scripts and spelling of version
| Python | mit | musically-ut/anki-slides-import | ---
+++
@@ -6,7 +6,7 @@
REQUIRES = REQUIRES + ["wand>=0.4.0"]
setup(
- varsion='0.0.1',
+ version='0.0.1',
name="anki-slides-import",
author="Utkarsh Upadhyay",
author_email="musically.ut@gmail.com",
@@ -16,7 +16,7 @@
install_requires=REQUIRES,
url="https://github.com/musically-ut/anki-slides-import",
packages=["slidesimport"],
- entry_points={ "console_script": [ "slides2anki = slidesimport.run" ]},
+ entry_points={ "console_scripts": [ "slides2anki = slidesimport.slidesimport:run" ]},
classifiers = [
"License :: OSI Approved :: MIT License",
"Intended Audience :: Science/Research", |
588a4ebcc5cf3a01b6769f1ae1a4df50d50f9010 | setup.py | setup.py | #!/usr/bin/env python
import os
import sys
import crabpy
from setuptools import setup, find_packages
packages = [
'crabpy',
]
requires = [
'suds-jurko>=0.6.0',
'dogpile.cache'
]
setup(
name='crabpy',
version='0.3.1',
description='Interact with AGIV webservices.',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGES.rst').read(),
author='Onroerend Erfgoed',
author_email='ict@onroerenderfgoed.be',
url='http://github.com/onroerenderfgoed/crabpy',
packages=find_packages(),
package_data={'': ['LICENSE']},
package_dir={'crabpy': 'crabpy'},
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
test_suite='nose.collector'
)
| #!/usr/bin/env python
import os
import sys
import crabpy
from setuptools import setup, find_packages
packages = [
'crabpy',
]
requires = [
'suds-jurko>=0.6.0',
'dogpile.cache',
'six'
]
setup(
name='crabpy',
version='0.3.1',
description='Interact with AGIV webservices.',
long_description=open('README.rst').read() + '\n\n' +
open('CHANGES.rst').read(),
author='Onroerend Erfgoed',
author_email='ict@onroerenderfgoed.be',
url='http://github.com/onroerenderfgoed/crabpy',
packages=find_packages(),
package_data={'': ['LICENSE']},
package_dir={'crabpy': 'crabpy'},
include_package_data=True,
install_requires=requires,
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
test_suite='nose.collector'
)
| Add six for py2/3 compatibility. | Add six for py2/3 compatibility.
| Python | mit | OnroerendErfgoed/crabpy | ---
+++
@@ -13,7 +13,8 @@
requires = [
'suds-jurko>=0.6.0',
- 'dogpile.cache'
+ 'dogpile.cache',
+ 'six'
]
setup( |
a6dd042ecc79cef66ab04704c88e30097e981aba | setup.py | setup.py | #!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.14",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-likwid-metric.py", "scripts/bentoo-quickstart.py",
"scripts/bentoo-calltree.py", "scripts/bentoo-merge.py",
"scripts/bentoo-calltree-analyser.py",
"scripts/bentoo-viewer.py", "scripts/bentoo-svgconvert.py",
"scripts/bentoo-confreader.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
| #!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.15.dev",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-likwid-metric.py", "scripts/bentoo-quickstart.py",
"scripts/bentoo-calltree.py", "scripts/bentoo-merge.py",
"scripts/bentoo-calltree-analyser.py",
"scripts/bentoo-viewer.py", "scripts/bentoo-svgconvert.py",
"scripts/bentoo-confreader.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
| Prepare for next dev cycle | Prepare for next dev cycle
| Python | mit | ProgramFan/bentoo | ---
+++
@@ -5,7 +5,7 @@
setup(
name="bentoo",
description="Benchmarking tools",
- version="0.14",
+ version="0.15.dev",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py", |
36909f73f0d59fdf3e919805e7dc19b66484549f | setup.py | setup.py | from parsable import parsable
from setuptools import setup
with open('README.md') as f:
long_description = f.read()
setup(
name='treecat',
version='0.0.1',
description='A tree-of-mixtures nonparametric Bayesian model',
long_description=long_description,
author='Fritz Obermeyer',
author_email='fritz.obermeyer@gmail.com',
packages=['treecat'],
entry_points=parsable.find_entry_points('treecat'),
install_requires=['numpy', 'six', 'parsable'],
extras_require={
'tensorflow': ['tensorflow>=1.1.0'],
'tensorflow with gpu': ['tensorflow-gpu>=1.1.0']
},
tests_require=['pytest', 'flake8', 'goftests'],
license='Apache License 2.0')
| from parsable import parsable
from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='treecat',
version='0.0.1',
description='A tree-of-mixtures nonparametric Bayesian model',
long_description=long_description,
author='Fritz Obermeyer',
author_email='fritz.obermeyer@gmail.com',
packages=['treecat'],
entry_points=parsable.find_entry_points('treecat'),
install_requires=['numpy', 'six', 'parsable'],
extras_require={
'tensorflow': ['tensorflow>=1.1.0'],
'tensorflow with gpu': ['tensorflow-gpu>=1.1.0']
},
tests_require=['pytest', 'flake8', 'goftests'],
license='Apache License 2.0')
| Use pypandoc to convert README from md to rst | Use pypandoc to convert README from md to rst
| Python | apache-2.0 | posterior/treecat,posterior/treecat | ---
+++
@@ -1,8 +1,11 @@
from parsable import parsable
from setuptools import setup
-with open('README.md') as f:
- long_description = f.read()
+try:
+ import pypandoc
+ long_description = pypandoc.convert('README.md', 'rst')
+except(IOError, ImportError):
+ long_description = open('README.md').read()
setup(
name='treecat', |
469016385d0fe11c397b6d0b4c5ce531a515dfe2 | setup.py | setup.py | from setuptools import setup
entry_points = {
'console_scripts': [
'whatportis=whatportis.cli:run',
]
}
readme = open('README.rst').read()
setup(
name="whatportis",
version="0.6",
url='http://github.com/ncrocfer/whatportis',
author='Nicolas Crocfer',
author_email='ncrocfer@gmail.com',
description="A command to search port names and numbers",
long_description=readme,
packages=['whatportis'],
include_package_data=True,
install_requires=[
"simplejson",
"tinydb",
"requests",
"prettytable",
"click"
],
extras_require={
"dev": [
"pytest",
"tox"
],
"server": [
"flask"
]
},
entry_points=entry_points,
classifiers=(
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Natural Language :: English',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
),
)
| from setuptools import setup
entry_points = {
'console_scripts': [
'whatportis=whatportis.cli:run',
]
}
readme = open('README.rst').read()
setup(
name="whatportis",
version="0.7",
url='http://github.com/ncrocfer/whatportis',
author='Nicolas Crocfer',
author_email='ncrocfer@gmail.com',
description="A command to search port names and numbers",
long_description=readme,
packages=['whatportis'],
include_package_data=True,
install_requires=[
"simplejson",
"tinydb",
"requests",
"prettytable",
"click"
],
extras_require={
"dev": [
"pytest",
"tox"
],
"server": [
"flask"
]
},
entry_points=entry_points,
classifiers=(
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Natural Language :: English',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
),
)
| Bump version (0.6 -> 0.7) | Bump version (0.6 -> 0.7)
| Python | mit | ncrocfer/whatportis | ---
+++
@@ -9,7 +9,7 @@
setup(
name="whatportis",
- version="0.6",
+ version="0.7",
url='http://github.com/ncrocfer/whatportis',
author='Nicolas Crocfer',
author_email='ncrocfer@gmail.com', |
a42ec3c9f01897d1309e1abc05e30e0ff396162e | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="lightmatchingengine",
url="https://github.com/gavincyi/LightMatchingEngine",
license='MIT',
author="Gavin Chan",
author_email="gavincyi@gmail.com",
description="A light matching engine",
packages=find_packages(exclude=('tests',)),
use_scm_version=True,
install_requires=[],
setup_requires=['setuptools_scm'],
tests_require=[
'pytest'
],
extra_requires={
'performance': ['pandas']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
| from setuptools import setup, find_packages
setup(
name="lightmatchingengine",
url="https://github.com/gavincyi/LightMatchingEngine",
license='MIT',
author="Gavin Chan",
author_email="gavincyi@gmail.com",
description="A light matching engine",
packages=find_packages(exclude=('tests',)),
use_scm_version=True,
install_requires=[],
setup_requires=['setuptools_scm'],
tests_require=[
'pytest'
],
extra_requires={
'performance': ['pandas', 'docopt', 'tabulate']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
)
| Add required dependencies for performance test | Add required dependencies for performance test
| Python | mit | gavincyi/LightMatchingEngine | ---
+++
@@ -19,7 +19,7 @@
'pytest'
],
extra_requires={
- 'performance': ['pandas']
+ 'performance': ['pandas', 'docopt', 'tabulate']
},
classifiers=[ |
485a5ffe382e45766c60d7b3f35d12c58d8cf2c3 | setup.py | setup.py | import os
from setuptools import setup, find_packages
PYPI_RESTRUCTURED_TEXT_INFO = \
"""
Basic Example
-------------
::
from assembla import API
assembla = API((
'', # Username
'', # Password
))
print assembla.space(name='Big Project').ticket(number=201).status_name
Full documentation at http://github.com/markfinger/assembla
"""
setup(
name = 'assembla',
version = '1.2.3',
packages = find_packages(),
install_requires = [
'requests>=0.7.4',
'lxml>=2.3.1',
],
package_data = {'assembla': []},
entry_points = {},
# metadata for upload to PyPI
author = 'Mark Finger',
author_email = 'markfinger@gmail.com',
description = 'An easy to use wrapper around the Assembla API',
license = 'BSD',
platforms=['any'],
keywords = 'Assembla API',
url = 'http://github.com/markfinger/assembla/',
long_description = PYPI_RESTRUCTURED_TEXT_INFO,
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development',
'Operating System :: OS Independent',
'Programming Language :: Python',
'License :: OSI Approved :: BSD License',
],
)
| import os
from setuptools import setup, find_packages
PYPI_RESTRUCTURED_TEXT_INFO = \
"""
Basic Example
-------------
::
from assembla import API
assembla = API((
'', # Username
'', # Password
))
print assembla.space(name='Big Project').ticket(number=201).status_name
Full documentation at http://github.com/markfinger/assembla
"""
setup(
name = 'assembla',
version = '1.2.1',
packages = find_packages(),
install_requires = [
'requests>=0.7.4',
'lxml>=2.3.1',
],
package_data = {'assembla': []},
entry_points = {},
# metadata for upload to PyPI
author = 'Mark Finger',
author_email = 'markfinger@gmail.com',
description = 'An easy to use wrapper around the Assembla API',
license = 'BSD',
platforms=['any'],
keywords = 'Assembla API',
url = 'http://github.com/markfinger/assembla/',
long_description = PYPI_RESTRUCTURED_TEXT_INFO,
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development',
'Operating System :: OS Independent',
'Programming Language :: Python',
'License :: OSI Approved :: BSD License',
],
)
| Revert "Bumping the version to 1.2.3." | Revert "Bumping the version to 1.2.3."
This reverts commit 1b60d37c79fd5da505fe9312756e4eebc1464ddb.
| Python | mit | markfinger/assembla | ---
+++
@@ -23,7 +23,7 @@
setup(
name = 'assembla',
- version = '1.2.3',
+ version = '1.2.1',
packages = find_packages(),
install_requires = [ |
0dbc27a762c60c7c38af8ea3ee3c463431269309 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(name='check_ruckus_vsz',
version='0.13',
description='Nagios Check for Ruckus SCG',
author='Lukas Schauer',
author_email='l.schauer@cygnusnetworks.de',
license='Apache 2.0',
packages=['ruckus_vsz_snmp'],
scripts=['check_ruckus_vsz', 'check_ruckus_ap'],
install_requires=['configparser', 'nagiosplugin', 'pysnmp', 'pysnmp-mibs', 'ipaddress'])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(name='check_ruckus_vsz',
version='0.13',
description='Nagios Check for Ruckus SCG',
author='Lukas Schauer',
author_email='l.schauer@cygnusnetworks.de',
license='Apache 2.0',
packages=['ruckus_vsz_snmp'],
scripts=['check_ruckus_vsz', 'check_ruckus_ap'],
zip_safe=False,
install_requires=['configparser', 'nagiosplugin', 'pysnmp', 'pysnmp-mibs', 'ipaddress'])
| Fix zip_safe for local installs | Fix zip_safe for local installs
| Python | apache-2.0 | CygnusNetworks/check_ruckus_vsz,CygnusNetworks/check_ruckus_vsz,CygnusNetworks/check_ruckus_vsz | ---
+++
@@ -11,4 +11,5 @@
license='Apache 2.0',
packages=['ruckus_vsz_snmp'],
scripts=['check_ruckus_vsz', 'check_ruckus_ap'],
+ zip_safe=False,
install_requires=['configparser', 'nagiosplugin', 'pysnmp', 'pysnmp-mibs', 'ipaddress']) |
b4b2eeb7c5782a638d80acce369e920d671134f3 | setup.py | setup.py | #!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='django-bleach',
version="0.1.2",
description='Easily use bleach with Django models and templates',
author='Tim Heap',
author_email='heap.tim@gmail.com',
url='https://bitbucket.org/ionata/django-bleach',
packages=['django_bleach'],
install_requires=['bleach'],
package_data={},
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
)
| #!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='django-bleach',
version="0.1.2",
description='Easily use bleach with Django models and templates',
author='Tim Heap',
author_email='heap.tim@gmail.com',
url='https://bitbucket.org/ionata/django-bleach',
packages=find_packages(),
install_requires=['bleach'],
package_data={},
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
)
| Use `find_packages()` instead of naming packages | Use `find_packages()` instead of naming packages
The `templatetags/` files were missed because they were not named. Using
`find_packages()` will ensure this does not happen again.
| Python | bsd-2-clause | python-force/django-bleach | ---
+++
@@ -14,7 +14,7 @@
author='Tim Heap',
author_email='heap.tim@gmail.com',
url='https://bitbucket.org/ionata/django-bleach',
- packages=['django_bleach'],
+ packages=find_packages(),
install_requires=['bleach'],
package_data={},
classifiers=[ |
8c73f1845a1807b506cac1797e2f1247cde5a164 | setup.py | setup.py | """Setup for PyPi"""
from setuptools import setup
setup(
name='nik',
packages=['nik'],
version='1.0',
description='Nifty tools and containers',
author='Nik Vanderhoof',
author_email='pypi@vanderhoof.pw',
url='https://github.com/nvander1/nik',
download_url='https://github.com/nvander1/nik/archive/1.0.tar.gz',
keywords=['container', 'namedtuple'],
classifiers=[]
)
| """Setup for PyPi"""
from setuptools import setup
setup(
name='nik',
packages=['nik'],
version='1.0',
description='Nifty tools and containers',
author='Nik Vanderhoof',
author_email='pypi@vanderhoof.pw',
url='https://github.com/nvander1/nik',
download_url='https://github.com/nvander1/nik/archive/1.0.tar.gz',
keywords=['container', 'namedtuple'],
classifiers=[]
)
| Stop using borked local pylintrc | Stop using borked local pylintrc
| Python | mit | nvander1/skrt | ---
+++
@@ -2,14 +2,14 @@
from setuptools import setup
setup(
- name='nik',
- packages=['nik'],
- version='1.0',
- description='Nifty tools and containers',
- author='Nik Vanderhoof',
- author_email='pypi@vanderhoof.pw',
- url='https://github.com/nvander1/nik',
- download_url='https://github.com/nvander1/nik/archive/1.0.tar.gz',
- keywords=['container', 'namedtuple'],
- classifiers=[]
+ name='nik',
+ packages=['nik'],
+ version='1.0',
+ description='Nifty tools and containers',
+ author='Nik Vanderhoof',
+ author_email='pypi@vanderhoof.pw',
+ url='https://github.com/nvander1/nik',
+ download_url='https://github.com/nvander1/nik/archive/1.0.tar.gz',
+ keywords=['container', 'namedtuple'],
+ classifiers=[]
) |
5b598ac0a950652a5eb13985b9fd2df23149931c | setup.py | setup.py | #!/usr/bin/env python
#
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from distutils.core import setup
import splunk
setup(
author="Splunk, Inc.",
author_email="devinfo@splunk.com",
description="The Splunk Software Development Kit for Python.",
license="http://www.apache.org/licenses/LICENSE-2.0",
name="splunk-sdk",
packages = ["splunk"],
url="http://github.com/splunk/splunk-sdk-python",
version=splunk.__version__,
classifiers = [
"Programming Language :: Python",
"Development Status :: 3 - Alpha",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Libraries :: Application Frameworks",
],
)
| #!/usr/bin/env python
#
# Copyright 2011-2012 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from distutils.core import setup
import splunklib
setup(
author="Splunk, Inc.",
author_email="devinfo@splunk.com",
description="The Splunk Software Development Kit for Python.",
license="http://www.apache.org/licenses/LICENSE-2.0",
name="splunk-sdk",
packages = ["splunklib"],
url="http://github.com/splunk/splunk-sdk-python",
version=splunklib.__version__,
classifiers = [
"Programming Language :: Python",
"Development Status :: 3 - Alpha",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Libraries :: Application Frameworks",
],
)
| Fix splunk module name (splunk => splunklib) | Fix splunk module name (splunk => splunklib)
| Python | apache-2.0 | kkirsche/splunk-sdk-python,kkirsche/splunk-sdk-python,sullivanmatt/splunk-sdk-python,kkirsche/splunk-sdk-python,splunk/splunk-sdk-python,kkirsche/splunk-sdk-python,splunk/splunk-sdk-python,ww9rivers/splunk-sdk-python,lowtalker/splunk-sdk-python | ---
+++
@@ -16,7 +16,7 @@
from distutils.core import setup
-import splunk
+import splunklib
setup(
author="Splunk, Inc.",
@@ -29,11 +29,11 @@
name="splunk-sdk",
- packages = ["splunk"],
+ packages = ["splunklib"],
url="http://github.com/splunk/splunk-sdk-python",
- version=splunk.__version__,
+ version=splunklib.__version__,
classifiers = [
"Programming Language :: Python", |
5ad5bcef00c807dadd9c829f1eb459c4cc73cb5f | src/books/models.py | src/books/models.py | from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0)
isbn_13 = models.IntegerField(default=0)
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.title + " by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30) | from django.db import models
from datetime import datetime
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
edition = models.SmallIntegerField(default=1)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
help_text="Please use the following format: <em>YYYY-MM-DD</em>.",
blank=True)
pages = models.IntegerField(default=0)
isbn_10 = models.IntegerField(default=0,help_text="Do not include dashes")
isbn_13 = models.IntegerField(default=0,help_text="Do not include dashes")
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
null=True,
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
if self.edition==1:
nth="st"
elif self.edition==2:
nth="nd"
elif self.edition==3:
nth="rd"
else : nth="th"
return self.title + ", "+ str(self.edition)+nth + " Edition by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30) | Refactor some book model variables | Refactor some book model variables
Add an edition field, help texts to some of the fields.
Change behaviour of the __str__ method to be more informative.
| Python | mit | melkisedek/sen_project,melkisedek/sen_project,melkisedek/sen_project | ---
+++
@@ -5,12 +5,14 @@
class Book(models.Model):
title = models.CharField(max_length=200)
+ edition = models.SmallIntegerField(default=1)
author = models.CharField(max_length=200)
year = models.DateTimeField('year published',
- help_text="Please use the following format: <em>YYYY-MM-DD</em>.")
+ help_text="Please use the following format: <em>YYYY-MM-DD</em>.",
+ blank=True)
pages = models.IntegerField(default=0)
- isbn_10 = models.IntegerField(default=0)
- isbn_13 = models.IntegerField(default=0)
+ isbn_10 = models.IntegerField(default=0,help_text="Do not include dashes")
+ isbn_13 = models.IntegerField(default=0,help_text="Do not include dashes")
description = models.TextField()
cover_image = models.ImageField('cover Image',
upload_to='cover_pics/%Y-%m-%d/',
@@ -18,7 +20,14 @@
blank=True)
date_added = models.DateTimeField(default=datetime.now)
def __str__(self):
- return self.title + " by " + self.author
+ if self.edition==1:
+ nth="st"
+ elif self.edition==2:
+ nth="nd"
+ elif self.edition==3:
+ nth="rd"
+ else : nth="th"
+ return self.title + ", "+ str(self.edition)+nth + " Edition by " + self.author
def was_added_recently(self):
return self.date_added >= timezone.now() - datetime.timedelta(days=30) |
3d4ec635c04a48fea8c2909c1a6630f89e6abba4 | donut/modules/groups/routes.py | donut/modules/groups/routes.py | import flask
import json
from flask import jsonify
from donut.modules.groups import blueprint, helpers
@blueprint.route("/1/groups/")
def get_groups_list():
# Create a dict of the passed in attribute which are filterable
filterable_attrs = ["group_id", "group_name", "group_desc", "type"]
attrs = {
tup: flask.request.args[tup]
for tup in flask.request.args if tup in filterable_attrs
}
fields = None
if "fields" in flask.request.args:
fields = [f.strip() for f in flask.request.args["fields"].split(',')]
return jsonify(helpers.get_group_list_data(fields=fields, attrs=attrs))
@blueprint.route("/1/groups/<int:group_id>/")
def get_groups(group_id):
"""GET /1/groups/<int:group_id>/"""
return jsonify(helpers.get_group_data(group_id))
@blueprint.route("/1/groups/<int:group_id>/members/")
def get_group_members(group_id):
"""GET /1/groups/<int:group_id>/"""
return str(helpers.get_members_by_group(group_id))
| import flask
import json
from flask import jsonify
from donut.modules.groups import blueprint, helpers
@blueprint.route("/1/groups/")
def get_groups_list():
# Create a dict of the passed in attribute which are filterable
filterable_attrs = ["group_id", "group_name", "group_desc", "type"]
attrs = {
tup: flask.request.args[tup]
for tup in flask.request.args if tup in filterable_attrs
}
fields = None
if "fields" in flask.request.args:
fields = [f.strip() for f in flask.request.args["fields"].split(',')]
return jsonify(helpers.get_group_list_data(fields=fields, attrs=attrs))
@blueprint.route("/1/groups/<int:group_id>/")
def get_groups(group_id):
"""GET /1/groups/<int:group_id>/"""
return jsonify(helpers.get_group_data(group_id))
@blueprint.route("/1/groups/<int:group_id>/members/")
def get_group_members(group_id):
"""GET /1/groups/<int:group_id>/"""
return json.dumps(helpers.get_members_by_group(group_id))
| Send result as json instead of plaintext | Send result as json instead of plaintext
| Python | mit | ASCIT/donut,ASCIT/donut-python,ASCIT/donut-python,ASCIT/donut,ASCIT/donut | ---
+++
@@ -28,4 +28,4 @@
@blueprint.route("/1/groups/<int:group_id>/members/")
def get_group_members(group_id):
"""GET /1/groups/<int:group_id>/"""
- return str(helpers.get_members_by_group(group_id))
+ return json.dumps(helpers.get_members_by_group(group_id)) |
242d9dcb77f627852599381246bbf355a18b585d | setup.py | setup.py | from setuptools import setup
with open('README.rst') as f:
long_desc = f.read()
setup(
name='ttrss-python',
version='0.1.4',
description='A client library for the Tiny Tiny RSS web API',
long_description=long_desc,
url='https://github.com/Vassius/ttrss-python',
author='Markus Wiik',
author_email='markus.wiik@gmail.com',
packages=['ttrss'],
package_data={'': ['README.rst']},
include_package_data=True,
install_requires=['requests>=1.1.0'],
provides=['ttrss'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
],
)
| from setuptools import setup
with open('README.rst') as f:
long_desc = f.read()
setup(
name='ttrss-python',
version='0.1.5',
description='A client library for the Tiny Tiny RSS web API',
long_description=long_desc,
url='https://github.com/Vassius/ttrss-python',
author='Markus Wiik',
author_email='markus.wiik@gmail.com',
packages=['ttrss'],
package_data={'': ['README.rst']},
include_package_data=True,
install_requires=['requests>=1.1.0'],
provides=['ttrss'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
],
)
| Bump version for PyPI release | Bump version for PyPI release
| Python | mit | Vassius/ttrss-python | ---
+++
@@ -5,7 +5,7 @@
setup(
name='ttrss-python',
- version='0.1.4',
+ version='0.1.5',
description='A client library for the Tiny Tiny RSS web API',
long_description=long_desc,
url='https://github.com/Vassius/ttrss-python', |
48918fce2706a6f7a9aa0b001f8f1634a49972c8 | dthm4kaiako/config/__init__.py | dthm4kaiako/config/__init__.py | """Configuration for Django system."""
__version__ = "0.13.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| """Configuration for Django system."""
__version__ = "0.13.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| Increment version number to 0.13.2 | Increment version number to 0.13.2
| Python | mit | uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers | ---
+++
@@ -1,6 +1,6 @@
"""Configuration for Django system."""
-__version__ = "0.13.1"
+__version__ = "0.13.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num |
d17677568c11fd5cf3c8d37ac2aa38dcd6c9f347 | setup.py | setup.py | #!/usr/bin/env python
# Copyright 2014-2015 Boxkite Inc.
# This file is part of the DataCats package and is released under
# the terms of the GNU Affero General Public License version 3.0.
# See LICENSE.txt or http://www.fsf.org/licensing/licenses/agpl-3.0.html
from setuptools import setup
import sys
install_requires=[
'setuptools',
'docopt',
'docker-py>=1.1.0',
'requests>=2.5.2', # help with docker-py requirement
]
exec(open("datacats/version.py").read())
setup(
name='datacats',
version=__version__,
description='CKAN Data Catalog Developer Tools built on Docker',
license='AGPL3',
author='Boxkite',
author_email='contact@boxkite.ca',
url='https://github.com/datacats/datacats',
packages=[
'datacats',
'datacats.tests',
'datacats.cli',
],
install_requires=install_requires,
include_package_data=True,
test_suite='datacats.tests',
zip_safe=False,
entry_points = """
[console_scripts]
datacats=datacats.cli.main:main
""",
)
| #!/usr/bin/env python
# Copyright 2014-2015 Boxkite Inc.
# This file is part of the DataCats package and is released under
# the terms of the GNU Affero General Public License version 3.0.
# See LICENSE.txt or http://www.fsf.org/licensing/licenses/agpl-3.0.html
from setuptools import setup
import sys
install_requires = [
'setuptools',
'docopt',
'docker-py>=1.1.0',
'clint', # to output colored text to terminal
'requests>=2.5.2', # help with docker-py requirement
]
exec(open("datacats/version.py").read())
setup(
name='datacats',
version=__version__,
description='CKAN Data Catalog Developer Tools built on Docker',
license='AGPL3',
author='Boxkite',
author_email='contact@boxkite.ca',
url='https://github.com/datacats/datacats',
packages=[
'datacats',
'datacats.tests',
'datacats.cli',
],
install_requires=install_requires,
include_package_data=True,
test_suite='datacats.tests',
zip_safe=False,
entry_points="""
[console_scripts]
datacats=datacats.cli.main:main
""",
)
| Add clint package to datacats requirements | Add clint package to datacats requirements
| Python | agpl-3.0 | datawagovau/datacats,datacats/datacats,JJediny/datacats,poguez/datacats,deniszgonjanin/datacats,florianm/datacats,reneenoble/datacats,deniszgonjanin/datacats,poguez/datacats,florianm/datacats,JackMc/datacats,JJediny/datacats,JackMc/datacats,dborzov/datacats,datawagovau/datacats,reneenoble/datacats,wardi/datacats,datacats/datacats,dborzov/datacats,wardi/datacats | ---
+++
@@ -9,10 +9,11 @@
from setuptools import setup
import sys
-install_requires=[
+install_requires = [
'setuptools',
'docopt',
'docker-py>=1.1.0',
+ 'clint', # to output colored text to terminal
'requests>=2.5.2', # help with docker-py requirement
]
@@ -35,9 +36,8 @@
include_package_data=True,
test_suite='datacats.tests',
zip_safe=False,
- entry_points = """
+ entry_points="""
[console_scripts]
datacats=datacats.cli.main:main
""",
)
- |
0443e6f70a8c2c072a4d80bac6aa4a0cc99df51d | setup.py | setup.py | from setuptools import setup
from setuptools import find_packages
from pip.req import parse_requirements
import pip
import xblog
REQUIREMENTS_FILE = "xblog/requirements.txt"
requirements = [str(ir.req) for ir in parse_requirements(REQUIREMENTS_FILE, session=pip.download.PipSession())]
setup(
name='django-xblog',
version=xblog.__version__,
description="A full-featured blogging application for your Django site",
long_description=open('README.md').read(),
keywords='django, blog, weblog, bootstrap, metaWeblog, wordpress',
author=xblog.__author__,
author_email=xblog.__email__,
url=xblog.__url__,
packages=find_packages(),
classifiers=[
'Framework :: Django :: 1.8',
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: BSD License',
],
license=xblog.__license__,
include_package_data=True,
zip_safe=False,
install_requires=requirements,
)
| import pip
import xblog
from setuptools import setup
from setuptools import find_packages
try: # for pip >= 10
from pip._internal.req import parse_requirements
except ImportError: # for pip <= 9.0.3
from pip.req import parse_requirements
REQUIREMENTS_FILE = "xblog/requirements.txt"
requirements = [str(ir.req) for ir in parse_requirements(REQUIREMENTS_FILE, session=pip.download.PipSession())]
setup(
name='django-xblog',
version=xblog.__version__,
description="A full-featured blogging application for your Django site",
long_description=open('README.md').read(),
keywords='django, blog, weblog, bootstrap, metaWeblog, wordpress',
author=xblog.__author__,
author_email=xblog.__email__,
url=xblog.__url__,
packages=find_packages(),
classifiers=[
'Framework :: Django :: 1.8',
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: BSD License',
],
license=xblog.__license__,
include_package_data=True,
zip_safe=False,
install_requires=requirements,
)
| Fix for pip 10.0 and later | Fix for pip 10.0 and later
| Python | bsd-2-clause | rubeon/django-xblog,rubeon/django-xblog,rubeon/django-xblog | ---
+++
@@ -1,8 +1,14 @@
+import pip
+import xblog
+
from setuptools import setup
from setuptools import find_packages
-from pip.req import parse_requirements
-import pip
-import xblog
+
+try: # for pip >= 10
+ from pip._internal.req import parse_requirements
+except ImportError: # for pip <= 9.0.3
+ from pip.req import parse_requirements
+
REQUIREMENTS_FILE = "xblog/requirements.txt"
|
91a5221c1f9c81ef9c430daf25839f76b676ebe1 | setup.py | setup.py | from setuptools import setup, find_packages
from setuptools.command.install import install as Install
import re
versionPattern = re.compile(r"""^__version__ = ['"](.*?)['"]$""", re.M)
with open("axiom/_version.py", "rt") as f:
version = versionPattern.search(f.read()).group(1)
class InstallAndRegenerate(Install):
def run(self):
"""
Runs the usual install logic, then regenerates the plugin cache.
"""
Install.run(self)
from twisted import plugin
list(plugin.getPlugins(plugin.IPlugin, "axiom.plugins"))
setup(
name="Axiom",
version=version,
description="An in-process object-relational database",
url="http://divmod.org/trac/wiki/DivmodAxiom",
maintainer="Divmod, Inc.",
maintainer_email="support@divmod.org",
install_requires=["twisted", "epsilon"],
packages=find_packages() + ['twisted.plugins'],
scripts=['bin/axiomatic'],
license="MIT",
platforms=["any"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Framework :: Twisted",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2 :: Only",
"Topic :: Database"])
| from setuptools import setup, find_packages
from setuptools.command.install import install as Install
import re
versionPattern = re.compile(r"""^__version__ = ['"](.*?)['"]$""", re.M)
with open("axiom/_version.py", "rt") as f:
version = versionPattern.search(f.read()).group(1)
class InstallAndRegenerate(Install):
def run(self):
"""
Runs the usual install logic, then regenerates the plugin cache.
"""
Install.run(self)
from twisted import plugin
list(plugin.getPlugins(plugin.IPlugin, "axiom.plugins"))
setup(
name="Axiom",
version=version,
description="An in-process object-relational database",
url="https://launchpad.net/divmod.org",
maintainer="Divmod, Inc.",
maintainer_email="support@divmod.org",
install_requires=["twisted", "epsilon"],
packages=find_packages() + ['twisted.plugins'],
scripts=['bin/axiomatic'],
license="MIT",
platforms=["any"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Framework :: Twisted",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2 :: Only",
"Topic :: Database"])
| Use Launchpad page as URL | Use Launchpad page as URL | Python | mit | twisted/axiom,hawkowl/axiom | ---
+++
@@ -19,7 +19,7 @@
name="Axiom",
version=version,
description="An in-process object-relational database",
- url="http://divmod.org/trac/wiki/DivmodAxiom",
+ url="https://launchpad.net/divmod.org",
maintainer="Divmod, Inc.",
maintainer_email="support@divmod.org", |
a5e3b1d45857feec1a7d0bfbfea2ba47e2e27e12 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
import os
# Utility function to read README file
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='iss',
version='2.5.10',
description="Ideally Single Source app for MemberSuite data.",
author='AASHE',
author_email='it@aashe.org',
url='https://github.com/aashe/iss',
long_description=read("README.md"),
packages=[
'iss',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
],
install_requires=[
"beatbox==32.1",
"membersuite_api_client==0.4.3",
"pycountry",
"pyYAML==3.12",
]
)
| #!/usr/bin/env python
from setuptools import setup
import os
# Utility function to read README file
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='iss',
version='2.6',
description="Ideally Single Source app for MemberSuite data.",
author='AASHE',
author_email='it@aashe.org',
url='https://github.com/aashe/iss',
long_description=read("README.md"),
packages=[
'iss',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
],
install_requires=[
"beatbox==32.1",
"membersuite_api_client==0.4.3",
"pycountry",
"pyYAML==3.12",
]
)
| Change version to 2.6 as feature was added | Change version to 2.6 as feature was added
| Python | mit | AASHE/iss | ---
+++
@@ -10,7 +10,7 @@
setup(
name='iss',
- version='2.5.10',
+ version='2.6',
description="Ideally Single Source app for MemberSuite data.",
author='AASHE',
author_email='it@aashe.org', |
860de1e0d027dc8cd8b67ae9924ff181d234c22f | setup.py | setup.py | from setuptools import setup
REPO_URL = 'http://github.com/datasciencebr/serenata-toolbox'
setup(
author='Serenata de Amor',
author_email='op.serenatadeamor@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
description='Toolbox for Serenata de Amor project',
zip_safe=False,
install_requires=[
'aiofiles',
'aiohttp',
'boto3',
'beautifulsoup4>=4.4',
'lxml>=3.6',
'pandas>=0.18',
'tqdm'
],
keywords='serenata de amor, data science, brazil, corruption',
license='MIT',
long_description='Check `Serenata Toolbox at GitHub <{}>`_.'.format(REPO_URL),
name='serenata-toolbox',
packages=[
'serenata_toolbox.federal_senate',
'serenata_toolbox.chamber_of_deputies',
'serenata_toolbox.datasets'
],
url=REPO_URL,
version='12.4.1'
)
| from setuptools import setup
REPO_URL = 'http://github.com/datasciencebr/serenata-toolbox'
setup(
author='Serenata de Amor',
author_email='op.serenatadeamor@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
description='Toolbox for Serenata de Amor project',
zip_safe=False,
install_requires=[
'aiofiles',
'aiohttp',
'boto3',
'beautifulsoup4>=4.4',
'lxml>=3.6',
'pandas>=0.18',
'tqdm'
],
keywords='serenata de amor, data science, brazil, corruption',
license='MIT',
long_description='Check `Serenata Toolbox at GitHub <{}>`_.'.format(REPO_URL),
name='serenata-toolbox',
packages=[
'serenata_toolbox.federal_senate',
'serenata_toolbox.chamber_of_deputies',
'serenata_toolbox.datasets'
],
url=REPO_URL,
version='12.3.2'
)
| Downgrade de version bump to a minor one | Downgrade de version bump to a minor one
| Python | mit | datasciencebr/serenata-toolbox | ---
+++
@@ -34,5 +34,5 @@
'serenata_toolbox.datasets'
],
url=REPO_URL,
- version='12.4.1'
+ version='12.3.2'
) |
febb5d9890b074985ca99f05c5b8ffc2572d2652 | apps/posters/forms.py | apps/posters/forms.py | # -*- coding: utf-8 -*-
from django import forms
from apps.posters.models import Poster
class AddPosterForm(forms.ModelForm):
when = forms.CharField(label=u"Event start", widget=forms.TextInput(attrs={'type': 'datetime-local'}))
display_from = forms.CharField(label=u"Vis plakat fra", widget=forms.TextInput(attrs={'type': 'date'}))
display_to = forms.CharField(label=u"Vis plakat til", widget=forms.TextInput(attrs={'type': 'date'}))
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_from', 'display_to', 'comments']
class EditPosterForm(forms.ModelForm):
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_to', 'display_from', 'comments', 'finished']
| # -*- coding: utf-8 -*-
from django import forms
from apps.posters.models import Poster
class AddPosterForm(forms.ModelForm):
display_from = forms.CharField(label=u"Vis plakat fra", widget=forms.TextInput(attrs={'type': 'date'}))
display_to = forms.CharField(label=u"Vis plakat til", widget=forms.TextInput(attrs={'type': 'date'}))
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_from', 'display_to', 'comments']
class EditPosterForm(forms.ModelForm):
class Meta:
model = Poster
fields = ['event', 'amount', 'description',
'price', 'display_to', 'display_from', 'comments', 'finished']
| Remove event start field from form | Remove event start field from form
| Python | mit | dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4 | ---
+++
@@ -6,7 +6,6 @@
class AddPosterForm(forms.ModelForm):
- when = forms.CharField(label=u"Event start", widget=forms.TextInput(attrs={'type': 'datetime-local'}))
display_from = forms.CharField(label=u"Vis plakat fra", widget=forms.TextInput(attrs={'type': 'date'}))
display_to = forms.CharField(label=u"Vis plakat til", widget=forms.TextInput(attrs={'type': 'date'}))
class Meta: |
6f20554c2a7b0223b2291227b67fb3ede003f525 | setup.py | setup.py | #!/usr/bin/env python
"""
Setup script
"""
import os
import sys
from distutils.core import setup
from fedwatch import __version__
setup(
name = 'fedwatch',
description = 'Module for creating simple scripts reacting to fedmsg messages',
version = __version__,
license = 'LGPLv2+',
py_modules = ['fedwatch'],
maintainer = 'Stanislav Ochotnicky',
maintainer_email = 'sochotnicky@redhat.com'
)
| #!/usr/bin/env python
"""
Setup script
"""
import os
import sys
from distutils.core import setup
from fedwatch import __version__
setup(
name = 'fedwatch',
description = 'Module for creating simple scripts reacting to fedmsg messages',
version = __version__,
license = 'LGPLv2+',
py_modules = ['fedwatch'],
scripts = ['fedwatch-cli'],
maintainer = 'Stanislav Ochotnicky',
maintainer_email = 'sochotnicky@redhat.com'
)
| Add fedwatch-cli as installed script | Add fedwatch-cli as installed script
| Python | lgpl-2.1 | pombredanne/fedwatch,sochotnicky/fedwatch,pombredanne/fedwatch,mizdebsk/fedwatch,sochotnicky/fedwatch,mizdebsk/fedwatch | ---
+++
@@ -14,6 +14,7 @@
version = __version__,
license = 'LGPLv2+',
py_modules = ['fedwatch'],
+ scripts = ['fedwatch-cli'],
maintainer = 'Stanislav Ochotnicky',
maintainer_email = 'sochotnicky@redhat.com'
) |
88451dba53aaf2257ece516d027bdbb0449cb8b4 | globus_cli/commands/task/event_list.py | globus_cli/commands/task/event_list.py | import click
from globus_cli.parsing import common_options, task_id_arg
from globus_cli.helpers import outformat_is_json, print_table
from globus_cli.services.transfer import print_json_from_iterator, get_client
@click.command('event-list', help='List Events for a given Task')
@common_options
@task_id_arg
def task_event_list(task_id):
"""
Executor for `globus task-event-list`
"""
client = get_client()
event_iterator = client.task_event_list(task_id)
if outformat_is_json():
print_json_from_iterator(event_iterator)
else:
print_table(event_iterator, [
('Time', 'time'), ('Code', 'code'), ('Is Error', 'is_error'),
('Details', 'details')])
| import click
from globus_cli.parsing import common_options, task_id_arg
from globus_cli.helpers import outformat_is_json, print_table
from globus_cli.services.transfer import print_json_from_iterator, get_client
@click.command('event-list', help='List Events for a given Task')
@common_options
@task_id_arg
@click.option(
"--limit", default=10, show_default=True, help="Limit number of results.")
@click.option(
"--filter-errors", is_flag=True, help="Filter results to errors")
@click.option(
"--filter-non-errors", is_flag=True, help="Filter results to non errors")
def task_event_list(task_id, limit, filter_errors, filter_non_errors):
"""
Executor for `globus task-event-list`
"""
client = get_client()
# set filter based on filter flags, if both set do nothing
filter_string = None
if filter_errors and not filter_non_errors:
filter_string = "is_error:1"
if filter_non_errors and not filter_errors:
filter_string = "is_error:1"
event_iterator = client.task_event_list(
task_id, num_results=limit, filter=filter_string)
if outformat_is_json():
print_json_from_iterator(event_iterator)
else:
print_table(event_iterator, [
('Time', 'time'), ('Code', 'code'), ('Is Error', 'is_error'),
('Details', 'details')])
| Add --limit and filtering options to task event-list | Add --limit and filtering options to task event-list
| Python | apache-2.0 | globus/globus-cli,globus/globus-cli | ---
+++
@@ -9,13 +9,27 @@
@click.command('event-list', help='List Events for a given Task')
@common_options
@task_id_arg
-def task_event_list(task_id):
+@click.option(
+ "--limit", default=10, show_default=True, help="Limit number of results.")
+@click.option(
+ "--filter-errors", is_flag=True, help="Filter results to errors")
+@click.option(
+ "--filter-non-errors", is_flag=True, help="Filter results to non errors")
+def task_event_list(task_id, limit, filter_errors, filter_non_errors):
"""
Executor for `globus task-event-list`
"""
client = get_client()
- event_iterator = client.task_event_list(task_id)
+ # set filter based on filter flags, if both set do nothing
+ filter_string = None
+ if filter_errors and not filter_non_errors:
+ filter_string = "is_error:1"
+ if filter_non_errors and not filter_errors:
+ filter_string = "is_error:1"
+
+ event_iterator = client.task_event_list(
+ task_id, num_results=limit, filter=filter_string)
if outformat_is_json():
print_json_from_iterator(event_iterator) |
89f3cd5367c537b19e0da495892eb7cd8a42390a | setup.py | setup.py | """
A utility to package subsets of large ASPECT PVD files to visualize elsewhere
without access to the original filesystem.
"""
from setuptools import find_packages, setup
package = 'sci_parameter_utils'
version = '0.2.0'
dependencies = [
'click',
'pyyaml',
'sympy'
]
test_deps = [
'pytest'
]
setup(
name=package,
version=version,
package_dir={'': 'src'},
packages=find_packages('src', exclude=['tests']),
install_requires=dependencies,
tests_require=test_deps,
entry_points={
'console_scripts': [
'sci_parameter_utils = sci_parameter_utils.cli:cli_main'
]
}
)
| """
A utility to package subsets of large ASPECT PVD files to visualize elsewhere
without access to the original filesystem.
"""
import sys
from setuptools import find_packages, setup
needs_pytest = {'pytest', 'test'}.intersection(sys.argv)
pytest_runner = ['pytest_runner'] if needs_pytest else []
package = 'sci_parameter_utils'
version = '0.2.0'
dependencies = [
'click',
'pyyaml',
'sympy',
]
setup_deps = [
] + pytest_runner
test_deps = [
'pytest',
]
setup(
name=package,
version=version,
package_dir={'': 'src'},
packages=find_packages('src', exclude=['tests']),
install_requires=dependencies,
setup_requires=setup_deps,
tests_require=test_deps,
entry_points={
'console_scripts': [
'sci_parameter_utils = sci_parameter_utils.cli:cli_main'
]
},
)
| Add better pytest test support | Add better pytest test support
| Python | mit | class4kayaker/Parameter_Utils | ---
+++
@@ -3,17 +3,23 @@
without access to the original filesystem.
"""
+import sys
from setuptools import find_packages, setup
+
+needs_pytest = {'pytest', 'test'}.intersection(sys.argv)
+pytest_runner = ['pytest_runner'] if needs_pytest else []
package = 'sci_parameter_utils'
version = '0.2.0'
dependencies = [
'click',
'pyyaml',
- 'sympy'
+ 'sympy',
]
+setup_deps = [
+] + pytest_runner
test_deps = [
- 'pytest'
+ 'pytest',
]
setup(
@@ -22,10 +28,11 @@
package_dir={'': 'src'},
packages=find_packages('src', exclude=['tests']),
install_requires=dependencies,
+ setup_requires=setup_deps,
tests_require=test_deps,
entry_points={
'console_scripts': [
'sci_parameter_utils = sci_parameter_utils.cli:cli_main'
]
- }
+ },
) |
44ac6ece920bb1602a053b31b78326a3f30be151 | setup.py | setup.py | from setuptools import setup
from guano import __version__
setup(
name='guano',
version=__version__,
description='GUANO, the "Grand Unified" bat acoustics metadata format',
long_description=open('README.md').read(),
url='https://github.com/riggsd/guano-py',
license='MIT',
author='David A. Riggs',
author_email='driggs@myotisoft.com',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='bats acoustics metadata',
py_modules=['guano'],
scripts=['bin/sb2guano.py'],
)
| from setuptools import setup
from glob import glob
from guano import __version__
setup(
name='guano',
version=__version__,
description='GUANO, the "Grand Unified" bat acoustics metadata format',
long_description=open('README.md').read(),
url='https://github.com/riggsd/guano-py',
license='MIT',
author='David A. Riggs',
author_email='driggs@myotisoft.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
keywords='bats acoustics metadata guano',
py_modules=['guano'],
scripts=glob('bin/*.py'),
)
| Declare to the PyPI that we support Python 3 | Declare to the PyPI that we support Python 3
| Python | mit | riggsd/guano-py | ---
+++
@@ -1,4 +1,5 @@
from setuptools import setup
+from glob import glob
from guano import __version__
@@ -13,12 +14,14 @@
author='David A. Riggs',
author_email='driggs@myotisoft.com',
classifiers=[
- 'Development Status :: 3 - Alpha',
+ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
+ 'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
],
- keywords='bats acoustics metadata',
+ keywords='bats acoustics metadata guano',
py_modules=['guano'],
- scripts=['bin/sb2guano.py'],
+ scripts=glob('bin/*.py'),
) |
973c99c616c4648d143407c7363a49bbeecea994 | setup.py | setup.py | #!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
from moulinette import env
LOCALES_DIR = env["LOCALES_DIR"]
# Extend installation
locale_files = []
if "install" in sys.argv:
# Evaluate locale files
for f in os.listdir("locales"):
if f.endswith(".json"):
locale_files.append("locales/%s" % f)
install_deps = [
"psutil",
"pytz",
"pyyaml",
"toml",
"gevent-websocket",
"bottle",
]
test_deps = [
"pytest",
"pytest-cov",
"pytest-env",
"pytest-mock",
"requests",
"requests-mock",
"webtest",
]
extras = {
"install": install_deps,
"tests": test_deps,
}
setup(
name="Moulinette",
version="2.0.0",
description="Prototype interfaces quickly and easily",
author="Yunohost Team",
author_email="yunohost@yunohost.org",
url="http://yunohost.org",
license="AGPL",
packages=find_packages(exclude=["test"]),
data_files=[(LOCALES_DIR, locale_files)],
python_requires=">=3.7.*, <3.8",
install_requires=install_deps,
tests_require=test_deps,
extras_require=extras,
)
| #!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
from moulinette import env
LOCALES_DIR = env["LOCALES_DIR"]
# Extend installation
locale_files = []
if "install" in sys.argv:
# Evaluate locale files
for f in os.listdir("locales"):
if f.endswith(".json"):
locale_files.append("locales/%s" % f)
install_deps = [
"psutil",
"pytz",
"pyyaml",
"toml",
"gevent-websocket",
"bottle",
]
test_deps = [
"pytest",
"pytest-cov",
"pytest-env",
"pytest-mock",
"mock",
"requests",
"requests-mock",
"webtest",
]
extras = {
"install": install_deps,
"tests": test_deps,
}
setup(
name="Moulinette",
version="2.0.0",
description="Prototype interfaces quickly and easily",
author="Yunohost Team",
author_email="yunohost@yunohost.org",
url="http://yunohost.org",
license="AGPL",
packages=find_packages(exclude=["test"]),
data_files=[(LOCALES_DIR, locale_files)],
python_requires=">=3.7.*, <3.8",
install_requires=install_deps,
tests_require=test_deps,
extras_require=extras,
)
| Add mock as a test dependency | Add mock as a test dependency
| Python | agpl-3.0 | YunoHost/moulinette | ---
+++
@@ -31,6 +31,7 @@
"pytest-cov",
"pytest-env",
"pytest-mock",
+ "mock",
"requests",
"requests-mock",
"webtest", |
29236db6d20d9039b5c32f34b1046466c86e3cf5 | setup.py | setup.py | from setuptools import setup
version = "0.6.0"
url = "https://github.com/AndyDeany/pygame-template"
with open("requirements.txt", "r") as requirements_file:
requirements = requirements_file.read().split()
setup(
name="pygametemplate",
version=version,
license="MIT",
description="Making making games with Pygame easier :)",
url=url,
download_url="{}/archive/v{}.tar.gz".format(url, version),
author="Andrew Dean",
author_email="oneandydean@hotmail.com",
packages=("pygametemplate",),
keywords=("pygame", "template", "gamedev"),
install_requires=requirements,
classifiers=(
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Software Development :: Libraries :: pygame",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
)
)
| from setuptools import setup
version = "0.6.0"
url = "https://github.com/AndyDeany/pygame-template"
with open("requirements.txt", "r") as requirements_file:
requirements = requirements_file.read().split()
setup(
name="pygametemplate",
version=version,
license="MIT",
description="Making making games with Pygame easier :)",
url=url,
download_url="{}/archive/v{}.tar.gz".format(url, version),
author="Andrew Dean",
author_email="oneandydean@hotmail.com",
packages=("pygametemplate",),
keywords=("pygame", "template", "gamedev"),
install_requires=requirements,
classifiers=(
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"Topic :: Software Development :: Libraries :: pygame",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
)
)
| Update classifiers to represent currently supported python versions | Update classifiers to represent currently supported python versions
| Python | mit | AndyDeany/pygame-template | ---
+++
@@ -25,9 +25,9 @@
"Natural Language :: English",
"Topic :: Software Development :: Libraries :: pygame",
"Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.4",
+ "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
)
) |
155729f74a5bf54d9b4c5d4ed813849023af0157 | setup.py | setup.py | from setuptools import setup
setup(
name='Resizer',
version='0.0.1',
description='Resizer helps you generate thumbnails and '
'load images from various sources.',
packages=['resizer'],
platforms='any',
install_requires=['PIL>=1.1.6']
)
| from setuptools import setup
setup(
name='Resizer',
version='0.0.1',
description='Resizer helps you generate thumbnails and '
'load images from various sources.',
packages=['resizer'],
platforms='any',
install_requires=['Pillow>=1.7.7']
)
| Use Pillow instead of PIL. | Use Pillow instead of PIL.
| Python | mit | FelixLoether/Resizer,FelixLoether/Resizer | ---
+++
@@ -7,5 +7,5 @@
'load images from various sources.',
packages=['resizer'],
platforms='any',
- install_requires=['PIL>=1.1.6']
+ install_requires=['Pillow>=1.7.7']
) |
1d83021ab395804020d1907db7b6db897dbd1efd | bin/upload_version.py | bin/upload_version.py | #!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha,
'prerelease': True
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
| #!python
import os
import sys
import json
import requests
import subprocess
def capture_output(command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
return proc.stdout.read()
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
file_md5_checksum = capture_output(["md5sum", filename]).split()[0]
file_sha256_checksum = capture_output(["sha256sum", filename]).split()[0]
version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'body': version_body,
'target_commitish': commit_sha,
'prerelease': True
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
headers=headers, verify=False)
| Update upload script to include checksums | Update upload script to include checksums
| Python | bsd-2-clause | moritz9/redash,pubnative/redash,ninneko/redash,hudl/redash,pubnative/redash,vishesh92/redash,imsally/redash,easytaxibr/redash,jmvasquez/redashtest,guaguadev/redash,vishesh92/redash,jmvasquez/redashtest,easytaxibr/redash,44px/redash,amino-data/redash,chriszs/redash,jmvasquez/redashtest,jmvasquez/redashtest,amino-data/redash,denisov-vlad/redash,easytaxibr/redash,chriszs/redash,denisov-vlad/redash,stefanseifert/redash,M32Media/redash,pubnative/redash,denisov-vlad/redash,getredash/redash,useabode/redash,moritz9/redash,ninneko/redash,easytaxibr/redash,EverlyWell/redash,rockwotj/redash,rockwotj/redash,ninneko/redash,guaguadev/redash,rockwotj/redash,hudl/redash,crowdworks/redash,getredash/redash,crowdworks/redash,useabode/redash,vishesh92/redash,hudl/redash,moritz9/redash,akariv/redash,guaguadev/redash,crowdworks/redash,useabode/redash,M32Media/redash,denisov-vlad/redash,stefanseifert/redash,stefanseifert/redash,vishesh92/redash,EverlyWell/redash,M32Media/redash,EverlyWell/redash,chriszs/redash,akariv/redash,amino-data/redash,rockwotj/redash,ninneko/redash,alexanderlz/redash,44px/redash,useabode/redash,getredash/redash,getredash/redash,amino-data/redash,alexanderlz/redash,alexanderlz/redash,pubnative/redash,44px/redash,easytaxibr/redash,pubnative/redash,jmvasquez/redashtest,44px/redash,hudl/redash,imsally/redash,crowdworks/redash,guaguadev/redash,stefanseifert/redash,moritz9/redash,imsally/redash,M32Media/redash,akariv/redash,stefanseifert/redash,EverlyWell/redash,ninneko/redash,akariv/redash,chriszs/redash,imsally/redash,alexanderlz/redash,denisov-vlad/redash,getredash/redash,akariv/redash,guaguadev/redash | ---
+++
@@ -3,30 +3,44 @@
import sys
import json
import requests
+import subprocess
+
+
+def capture_output(command):
+ proc = subprocess.Popen(command, stdout=subprocess.PIPE)
+ return proc.stdout.read()
+
if __name__ == '__main__':
- version = sys.argv[1]
- filepath = sys.argv[2]
- filename = filepath.split('/')[-1]
- github_token = os.environ['GITHUB_TOKEN']
- auth = (github_token, 'x-oauth-basic')
- commit_sha = os.environ['CIRCLE_SHA1']
+ version = sys.argv[1]
+ filepath = sys.argv[2]
+ filename = filepath.split('/')[-1]
+ github_token = os.environ['GITHUB_TOKEN']
+ auth = (github_token, 'x-oauth-basic')
+ commit_sha = os.environ['CIRCLE_SHA1']
- params = json.dumps({
- 'tag_name': 'v{0}'.format(version),
- 'name': 're:dash v{0}'.format(version),
- 'target_commitish': commit_sha,
- 'prerelease': True
- })
+ commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
+ file_md5_checksum = capture_output(["md5sum", filename]).split()[0]
+ file_sha256_checksum = capture_output(["sha256sum", filename]).split()[0]
+ version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)
- response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
- data=params,
- auth=auth)
+ params = json.dumps({
+ 'tag_name': 'v{0}'.format(version),
+ 'name': 're:dash v{0}'.format(version),
+ 'body': version_body,
+ 'target_commitish': commit_sha,
+ 'prerelease': True
+ })
- upload_url = response.json()['upload_url']
- upload_url = upload_url.replace('{?name}', '')
+ response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
+ data=params,
+ auth=auth)
- with open(filepath) as file_content:
- headers = {'Content-Type': 'application/gzip'}
- response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
+ upload_url = response.json()['upload_url']
+ upload_url = upload_url.replace('{?name}', '')
+ with open(filepath) as file_content:
+ headers = {'Content-Type': 'application/gzip'}
+ response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
+ headers=headers, verify=False)
+ |
60e208200970201f37b0eb4f85b6008681e1f35d | tasks.py | tasks.py | """
Automation tasks, aided by the Invoke package.
"""
import os
import webbrowser
from invoke import task, run
DOCS_DIR = 'docs'
DOCS_OUTPUT_DIR = os.path.join(DOCS_DIR, '_build')
@task
def docs(output='html', rebuild=False, show=True):
"""Build the docs and show them in default web browser."""
build_cmd = 'sphinx-build -b {output} {all} docs docs/_build'.format(
output=output,
all='-a -E' if rebuild else '')
run(build_cmd)
if show:
webbrowser.open_new_tab(os.path.join(DOCS_OUTPUT_DIR, 'index.html'))
| """
Automation tasks, aided by the Invoke package.
"""
import os
import webbrowser
import sys
from invoke import task, run
DOCS_DIR = 'docs'
DOCS_OUTPUT_DIR = os.path.join(DOCS_DIR, '_build')
@task
def docs(output='html', rebuild=False, show=True):
"""Build the docs and show them in default web browser."""
build_cmd = 'sphinx-build -b {output} {all} docs docs/_build'.format(
output=output,
all='-a -E' if rebuild else '')
run(build_cmd)
if show:
path = os.path.join(DOCS_OUTPUT_DIR, 'index.html')
if sys.platform == 'darwin':
path = 'file://%s' % os.path.abspath(path)
webbrowser.open_new_tab(path)
| Fix to showing generated docs on OSX | Fix to showing generated docs on OSX
| Python | bsd-3-clause | Xion/callee | ---
+++
@@ -3,6 +3,7 @@
"""
import os
import webbrowser
+import sys
from invoke import task, run
@@ -20,4 +21,7 @@
run(build_cmd)
if show:
- webbrowser.open_new_tab(os.path.join(DOCS_OUTPUT_DIR, 'index.html'))
+ path = os.path.join(DOCS_OUTPUT_DIR, 'index.html')
+ if sys.platform == 'darwin':
+ path = 'file://%s' % os.path.abspath(path)
+ webbrowser.open_new_tab(path) |
af7122220447b1abe771f37400daeb4370603dd4 | collection_pipelines/core.py | collection_pipelines/core.py | import functools
def coroutine(fn):
def wrapper(*args, **kwargs):
generator = fn(*args, **kwargs)
next(generator)
return generator
return wrapper
class CollectionPipelineProcessor:
sink = None
start_source = None
receiver = None
def process(self, item):
raise NotImplementedError
def on_done(self):
if self.receiver:
self.receiver.close()
def source(self, start_source):
self.start_source = start_source
@coroutine
def make_generator(self):
while True:
try:
item = yield
self.process(item)
except GeneratorExit:
self.on_done()
break
def __or__(self, other):
self.sink = other
def exec():
self.receiver = self.sink.make_generator()
self.start_source()
other.source(exec)
return other
| import functools
def coroutine(fn):
    """Decorator that builds and primes a generator-based coroutine.

    Calling the decorated function returns its generator already advanced
    to the first ``yield``, so callers may ``send()`` into it immediately.
    """
    def primed(*args, **kwargs):
        gen = fn(*args, **kwargs)
        # Prime the coroutine: run up to the first ``yield``.
        next(gen)
        return gen
    return primed
class CollectionPipelineProcessor:
    """Base class for one stage in a coroutine-based collection pipeline.

    Stages are chained with the ``|`` operator.  Each stage owns a
    ``receiver`` -- the primed generator of the next stage -- into which
    its ``process`` implementation is presumably expected to forward
    results (subclass contract; confirm against concrete processors).
    """

    # Next processor in the chain; assigned by __or__.
    sink = None
    # Callable that starts the upstream producer; stored by source().
    start_source = None
    # Primed generator of `sink`, created lazily when the chain runs.
    receiver = None

    def process(self, item):
        # Per-item transformation hook; subclasses must override.
        raise NotImplementedError

    def on_done(self):
        # End of stream: propagate downstream by closing the next stage's
        # generator (which raises GeneratorExit inside it).
        if self.receiver:
            self.receiver.close()

    def source(self, start_source):
        # Remember how to start the upstream producer; intermediate stages
        # defer the actual start until the end of the chain triggers it.
        self.start_source = start_source

    @coroutine
    def make_generator(self):
        """Receive items via send() until closed, then run on_done()."""
        while True:
            try:
                item = yield
                self.process(item)
            except GeneratorExit:
                self.on_done()
                break

    def __or__(self, other):
        """Chain ``self | other``; returns ``other`` so chains compose."""
        self.sink = other
        def exec():
            # Wire this stage to the next one's generator, then start the
            # upstream producer so items begin flowing.
            self.receiver = self.sink.make_generator()
            self.start_source()
        other.source(exec)
        return other
class CollectionPipelineOutput(CollectionPipelineProcessor):
    """Terminal processor that ends a pipeline chain.

    Unlike intermediate stages, which merely remember the upstream
    callable for later, an output stage invokes it as soon as it is
    wired up -- which is what sets the whole pipeline in motion.
    """

    def source(self, start_source):
        # Eagerly start pulling from upstream instead of deferring.
        start_source()
| Add base class for output pipeline processors | Add base class for output pipeline processors
| Python | mit | povilasb/pycollection-pipelines | ---
+++
@@ -44,3 +44,13 @@
other.source(exec)
return other
+
+
+class CollectionPipelineOutput(CollectionPipelineProcessor):
+ """Pipeline processor that ends the chain and starts outputing stream.
+
+ Output processor immediately starts consuming from the source.
+ Thus triggering the whole pipeline start.
+ """
+ def source(self, start_source):
+ start_source() |
93358a04380f427f3ac1cea84689a430bcb0c883 | jenkins/management/commands/import_jenkinsserver.py | jenkins/management/commands/import_jenkinsserver.py | from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from jenkins.management.helpers import import_jenkinsserver
class Command(BaseCommand):
    """Management command that imports or updates a JenkinsServer."""

    help = "Import or update a JenkinsServer"
    args = "[name] [url] [username] [password] [remote]"

    option_list = BaseCommand.option_list + (
        make_option(
            "--update", action="store_true", dest="update",
            default=False, help="Update if server already exists."),
    )

    def handle(self, *args, **options):
        """Validate the five positional arguments and run the import.

        Raises:
            CommandError: if not exactly five positional args are given.
        """
        if len(args) != 5:
            raise CommandError("must provide all parameters")
        name, url, username, password, remote = args
        # BUG FIX: the parsed arguments were previously dropped, so the
        # server details never reached the importer; forward them all.
        import_jenkinsserver(
            name, url, username, password, remote,
            update=options["update"], stdout=self.stdout)
        transaction.commit_unless_managed()
| from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from jenkins.management.helpers import import_jenkinsserver
class Command(BaseCommand):
help = "Import or update a JenkinsServer"
args = "[name] [url] [username] [password] [remote]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if server already exists."),
)
def handle(self, *args, **options):
if len(args) != 5:
raise CommandError("must provide all parameters")
name, url, username, password, remote = args
import_jenkinsserver(
name, url, username, password, remote,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
| Make the command-line tool pass the right parameters. | Make the command-line tool pass the right parameters.
| Python | mit | timrchavez/capomastro,caio1982/capomastro,caio1982/capomastro,caio1982/capomastro,timrchavez/capomastro | ---
+++
@@ -23,6 +23,7 @@
name, url, username, password, remote = args
import_jenkinsserver(
+ name, url, username, password, remote,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed() |
f93555f1039857d1c4ba06d3f5a95810f1f1d26e | devp2p/__init__.py | devp2p/__init__.py | # -*- coding: utf-8 -*-
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
try:
_dist = get_distribution('devp2p')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'devp2p')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'dirty'
else:
__version__ = _dist.version
# ########### endversion ##################
| # -*- coding: utf-8 -*-
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
import subprocess
import re
# Matches `git describe --tags --dirty` output, e.g. 'v0.3.1-14-gabc1234-dirty'.
GIT_DESCRIBE_RE = re.compile(r'^(?P<version>v\d+\.\d+\.\d+)-(?P<git>\d+-g[a-fA-F0-9]+(?:-dirty)?)$')

__version__ = None
try:
    # Prefer the version recorded by setuptools for the installed package.
    # BUG FIX: this file lives in devp2p/, so query the 'devp2p'
    # distribution (the name 'pyethapp' was left over from a copy-paste).
    _dist = get_distribution('devp2p')
    # Normalize case for Windows systems
    dist_loc = os.path.normcase(_dist.location)
    here = os.path.normcase(__file__)
    if not here.startswith(os.path.join(dist_loc, 'devp2p')):
        # not installed, but there is another version that *is*
        raise DistributionNotFound
    __version__ = _dist.version
except DistributionNotFound:
    pass

if not __version__:
    # Fall back to `git describe` when running from a source checkout.
    try:
        rev = subprocess.check_output(['git', 'describe', '--tags', '--dirty'],
                                      stderr=subprocess.STDOUT)
        if isinstance(rev, bytes):
            # Python 3 check_output returns bytes; decode before matching.
            rev = rev.decode('utf-8')
        match = GIT_DESCRIBE_RE.match(rev)
        if match:
            __version__ = "{}+git-{}".format(match.group("version"), match.group("git"))
    except Exception:
        # Best effort only: not a git checkout, or git is unavailable.
        pass

if not __version__:
    __version__ = 'undefined'
# ########### endversion ##################
| Use version extraction code from pyethapp / pyethereum | Use version extraction code from pyethapp / pyethereum | Python | mit | ethereum/pydevp2p,ms83/pydevp2p | ---
+++
@@ -2,16 +2,37 @@
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
+import subprocess
+import re
+
+
+GIT_DESCRIBE_RE = re.compile('^(?P<version>v\d+\.\d+\.\d+)-(?P<git>\d+-g[a-fA-F0-9]+(?:-dirty)?)$')
+
+
+__version__ = None
try:
- _dist = get_distribution('devp2p')
+ _dist = get_distribution('pyethapp')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
- if not here.startswith(os.path.join(dist_loc, 'devp2p')):
+ if not here.startswith(os.path.join(dist_loc, 'pyethapp')):
# not installed, but there is another version that *is*
raise DistributionNotFound
+ __version__ = _dist.version
except DistributionNotFound:
- __version__ = 'dirty'
-else:
- __version__ = _dist.version
+ pass
+
+if not __version__:
+ try:
+ rev = subprocess.check_output(['git', 'describe', '--tags', '--dirty'],
+ stderr=subprocess.STDOUT)
+ match = GIT_DESCRIBE_RE.match(rev)
+ if match:
+ __version__ = "{}+git-{}".format(match.group("version"), match.group("git"))
+ except:
+ pass
+
+if not __version__:
+ __version__ = 'undefined'
+
# ########### endversion ################## |
4dd28beddc2df9efeef798491d1963800113f801 | django_bootstrap_calendar/models.py | django_bootstrap_calendar/models.py | # -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from django.db import models
from django.utils.translation import ugettext_lazy as _
from utils import datetime_to_timestamp
class CalendarEvent(models.Model):
"""
Calendar Events
"""
CSS_CLASS_CHOICES = (
('', _('Normal')),
('event-warning', _('Warning')),
('event-info', _('Info')),
('event-success', _('Success')),
('event-inverse', _('Inverse')),
('event-special', _('Special')),
('event-important', _('Important')),
)
title = models.CharField(max_length=255, verbose_name=_('Title'))
url = models.URLField(verbose_name=_('URL'), null=True, blank=True)
css_class = models.CharField(max_length=20, verbose_name=_('CSS Class'),
choices=CSS_CLASS_CHOICES)
start = models.DateTimeField(verbose_name=_('Start Date'))
end = models.DateTimeField(verbose_name=_('End Date'), null=True,
blank=True)
@property
def start_timestamp(self):
"""
Return end date as timestamp
"""
return datetime_to_timestamp(self.start)
@property
def end_timestamp(self):
"""
Return end date as timestamp
"""
return datetime_to_timestamp(self.end)
def __unicode__(self):
return self.title
| # -*- coding: utf-8 -*-
__author__ = 'sandlbn'
from django.db import models
from django.utils.translation import ugettext_lazy as _
from utils import datetime_to_timestamp
class CalendarEvent(models.Model):
    """
    Calendar Events
    """
    # Bootstrap-calendar CSS classes; the default 'Normal' choice is
    # deliberately stored as an empty string.
    CSS_CLASS_CHOICES = (
        ('', _('Normal')),
        ('event-warning', _('Warning')),
        ('event-info', _('Info')),
        ('event-success', _('Success')),
        ('event-inverse', _('Inverse')),
        ('event-special', _('Special')),
        ('event-important', _('Important')),
    )
    title = models.CharField(max_length=255, verbose_name=_('Title'))
    url = models.URLField(verbose_name=_('URL'), null=True, blank=True)
    # blank=True is required so the empty-string 'Normal' choice passes
    # model validation.
    css_class = models.CharField(blank=True, max_length=20, verbose_name=_('CSS Class'),
                                 choices=CSS_CLASS_CHOICES)
    start = models.DateTimeField(verbose_name=_('Start Date'))
    end = models.DateTimeField(verbose_name=_('End Date'), null=True,
                               blank=True)

    @property
    def start_timestamp(self):
        """
        Return start date as timestamp
        """
        return datetime_to_timestamp(self.start)

    @property
    def end_timestamp(self):
        """
        Return end date as timestamp
        """
        return datetime_to_timestamp(self.end)

    def __unicode__(self):
        return self.title
| Allow `css_class` to have blank value. | Allow `css_class` to have blank value.
Currently the default `css_class` choice (named `Normal`) is stored as a
blank string. For that blank value to pass model validation, the field
must be declared with `blank=True`.
| Python | bsd-3-clause | sandlbn/django-bootstrap-calendar,sandlbn/django-bootstrap-calendar,sandlbn/django-bootstrap-calendar | ---
+++
@@ -21,7 +21,7 @@
)
title = models.CharField(max_length=255, verbose_name=_('Title'))
url = models.URLField(verbose_name=_('URL'), null=True, blank=True)
- css_class = models.CharField(max_length=20, verbose_name=_('CSS Class'),
+ css_class = models.CharField(blank=True, max_length=20, verbose_name=_('CSS Class'),
choices=CSS_CLASS_CHOICES)
start = models.DateTimeField(verbose_name=_('Start Date'))
end = models.DateTimeField(verbose_name=_('End Date'), null=True, |
cea4e45dc95310993e3b23ceadf83cbda810f536 | EasyEuler/commands/list.py | EasyEuler/commands/list.py | import click
from tabulate import tabulate
from EasyEuler import data
@click.command()
@click.option('--sort', '-s', type=click.Choice(['id', 'difficulty']),
default='id', help='Sort the list by problem attribute.')
def cli(sort):
""" Lists all available problems. """
problems = sorted(data.problems, key=lambda p: p[sort.lower()])
problem_list = [(problem['id'], problem['name'], problem['difficulty'])
for problem in problems]
problem_table = tabulate(problem_list, ['ID', 'Name', 'Difficulty'],
tablefmt='fancy_grid')
click.echo_via_pager(problem_table)
| import click
from tabulate import tabulate
from EasyEuler import data
@click.command()
@click.option('--sort', '-s', type=click.Choice(['id', 'difficulty']),
              default='id', help='Sort the list by problem attribute.')
def cli(sort):
    """ Lists all available problems. """
    sort_key = sort.lower()
    ordered = sorted(data.problems, key=lambda problem: problem[sort_key])
    rows = [
        (problem['id'], problem['name'], '%d%%' % problem['difficulty'])
        for problem in ordered
    ]
    # Render as a grid table and page it, since the list is long.
    table = tabulate(rows, ['ID', 'Name', 'Difficulty'],
                     tablefmt='fancy_grid')
    click.echo_via_pager(table)
| Add percentage sign to difficulty | Add percentage sign to difficulty
| Python | mit | Encrylize/EasyEuler | ---
+++
@@ -11,8 +11,8 @@
""" Lists all available problems. """
problems = sorted(data.problems, key=lambda p: p[sort.lower()])
- problem_list = [(problem['id'], problem['name'], problem['difficulty'])
- for problem in problems]
+ problem_list = [(problem['id'], problem['name'],
+ '%d%%' % problem['difficulty']) for problem in problems]
problem_table = tabulate(problem_list, ['ID', 'Name', 'Difficulty'],
tablefmt='fancy_grid') |
13e141c1686198deaccc6c38d14cdbde8c4c8fb4 | passpie/_compat.py | passpie/_compat.py | import os
import sys
try:
from shutil import which as _which
except ImportError:
from distutils.spawn import find_executable as _which
def which(binary):
path = _which(binary)
if path:
realpath = os.path.realpath(path)
return realpath
return None
def is_python2():
return sys.version_info < (3,)
class FileNotFoundError(OSError):
def __init__(self, message="No such file or directory"):
super(FileNotFoundError, self).__init__(2, message)
class FileExistsError(OSError):
def __init__(self, message="File exists"):
super(FileExistsError, self).__init__(17, message)
| import os
import sys
try:
from shutil import which as _which
except ImportError:
from distutils.spawn import find_executable as _which
try:
basestring
except NameError:
basestring = str
def which(binary):
    """Locate *binary* on PATH and return its canonical path.

    Symlinks are resolved via realpath; returns None when not found.
    """
    found = _which(binary)
    return os.path.realpath(found) if found else None
def is_python2():
    """Return True when the running interpreter is older than Python 3."""
    interpreter_version = sys.version_info
    return interpreter_version < (3,)
class FileNotFoundError(OSError):
    """Python 2 stand-in for Python 3's FileNotFoundError (errno 2, ENOENT)."""

    def __init__(self, message="No such file or directory"):
        # errno first, human-readable message second, as OSError expects.
        OSError.__init__(self, 2, message)
class FileExistsError(OSError):
    """Python 2 stand-in for Python 3's FileExistsError (errno 17, EEXIST)."""

    def __init__(self, message="File exists"):
        # errno first, human-readable message second, as OSError expects.
        OSError.__init__(self, 17, message)
| Add basestring py2/py3 to compat | Add basestring py2/py3 to compat
| Python | mit | marcwebbie/passpie,eiginn/passpie,scorphus/passpie,marcwebbie/passpie,eiginn/passpie,scorphus/passpie | ---
+++
@@ -4,6 +4,11 @@
from shutil import which as _which
except ImportError:
from distutils.spawn import find_executable as _which
+
+try:
+ basestring
+except NameError:
+ basestring = str
def which(binary): |
8259a733e1f039cea55cfc5aad7d69e0fb37c43c | tests.py | tests.py | from money_conversion.money import Money
import unittest
class MoneyClassTest(unittest.TestCase):
def setUp(self):
self.twenty_euro = Money(20, 'EUR')
def test_convert_euro_to_usd(self):
twenty_usd = self.twenty_euro.to_usd()
self.assertIsInstance(twenty_usd, Money)
self.assertEqual('USD', twenty_usd.currency)
self.assertEqual(21.8, twenty_usd.amount)
def test_convert_euro_to_brl(self):
twenty_brl = self.twenty_euro.to_brl()
self.assertIsInstance(twenty_brl, Money)
self.assertEqual('BRL', twenty_brl.currency)
self.assertEqual(85, twenty_brl.amount)
if __name__ == '__main__':
unittest.main() | from money_conversion.money import Money
import unittest
class MoneyClassTest(unittest.TestCase):
    """Unit tests for Money's dynamic to_<currency>() conversion API."""

    def setUp(self):
        # Every test converts the same 20 EUR fixture.
        self.twenty_euro = Money(20, 'EUR')

    def test_convert_euro_to_usd(self):
        converted = self.twenty_euro.to_usd()
        self.assertIsInstance(converted, Money)
        self.assertEqual('USD', converted.currency)
        self.assertEqual(21.8, converted.amount)

    def test_convert_euro_to_brl(self):
        converted = self.twenty_euro.to_brl()
        self.assertIsInstance(converted, Money)
        self.assertEqual('BRL', converted.currency)
        self.assertEqual(85, converted.amount)

    def test_invalid_method_pattern_call(self):
        # Names that don't match the to_<currency> pattern must raise.
        with self.assertRaises(AttributeError):
            self.twenty_euro.batman()
if __name__ == '__main__':
unittest.main() | Add test that validates method call | Add test that validates method call
| Python | mit | mdsrosa/money-conversion-py | ---
+++
@@ -21,5 +21,10 @@
self.assertEqual(85, twenty_brl.amount)
+ def test_invalid_method_pattern_call(self):
+ with self.assertRaises(AttributeError):
+ twenty_brl = self.twenty_euro.batman()
+
+
if __name__ == '__main__':
unittest.main() |
0c2dc7714f2dbb1140f8c03b2181f1fd15c434bf | djlint/parsers.py | djlint/parsers.py | import ast
import os
class Parser(object):
def __init__(self, repo_path):
if not os.path.isabs(repo_path):
raise ValueError('Repository path is not absolute: %s' % repo_path)
self.repo_path = repo_path
def walk(self):
for root, dirnames, filenames in os.walk(self.repo_path):
for filename in filenames:
if filename.endswith('.py'):
yield os.path.join(root, filename)
def relpath(self, path):
return os.path.relpath(path, self.repo_path)
def parse_file(self, path):
relpath = self.relpath(path)
with open(path) as f:
content = f.read()
try:
return (relpath, ast.parse(content, relpath))
except SyntaxError, e:
return (relpath, e)
def parse(self):
return dict(self.parse_file(filepath) for filepath in self.walk())
| import ast
import os
class Parser(object):
    """
    Find all *.py files inside `repo_path` and parse them into ast nodes.

    For a file with syntax errors, the SyntaxError object is returned in
    place of an ast node.
    """
    def __init__(self, repo_path):
        # An absolute path keeps relpath()/walk() results well-defined.
        if not os.path.isabs(repo_path):
            raise ValueError('Repository path is not absolute: %s' % repo_path)
        self.repo_path = repo_path

    def walk(self):
        """
        Yield absolute paths to all *.py files inside `repo_path` directory.
        """
        for root, dirnames, filenames in os.walk(self.repo_path):
            for filename in filenames:
                if filename.endswith('.py'):
                    yield os.path.join(root, filename)

    def relpath(self, path):
        """Return `path` relative to the repository root."""
        return os.path.relpath(path, self.repo_path)

    def parse_file(self, path):
        """Parse one file; return (relpath, ast-node-or-SyntaxError)."""
        relpath = self.relpath(path)
        with open(path) as f:
            content = f.read()
        try:
            # relpath doubles as the filename reported in tracebacks.
            return (relpath, ast.parse(content, relpath))
        except SyntaxError, e:
            return (relpath, e)

    def parse(self):
        """Parse every .py file; return {relpath: ast-node-or-SyntaxError}."""
        return dict(self.parse_file(filepath) for filepath in self.walk())
| Add docstrings to Parser class | Add docstrings to Parser class
| Python | isc | alfredhq/djlint | ---
+++
@@ -3,6 +3,12 @@
class Parser(object):
+ """
+ Find all *.py files inside `repo_path` and parse its into ast nodes.
+
+ If file has syntax errors SyntaxError object will be returned except
+ ast node.
+ """
def __init__(self, repo_path):
if not os.path.isabs(repo_path):
@@ -10,6 +16,9 @@
self.repo_path = repo_path
def walk(self):
+ """
+ Yield absolute paths to all *.py files inside `repo_path` directory.
+ """
for root, dirnames, filenames in os.walk(self.repo_path):
for filename in filenames:
if filename.endswith('.py'): |
b91b0d667f64960fd1f07b7dc42290f287ab4c5b | scripts/endpoints_json.py | scripts/endpoints_json.py | #!/usr/bin/env python3
import lxml.html
from lxml.cssselect import CSSSelector
import requests
import json
class EndpointIdentifier:
_page = 'https://www.reddit.com/dev/api/oauth'
_no_scope = '(any scope)'
def __init__(self):
pass
def find(self):
page = requests.get(self._page)
if page.status_code != 200:
print("Bad status code:", page.status_code)
from sys import exit
exit(1)
tree = lxml.html.fromstring(page.text)
sel = CSSSelector('div[class="toc"] > ul > li > ul > li')
results = sel(tree)
sections = {}
for result in results:
scope = result.find('a').text_content()
if not scope:
scope = self._no_scope
endpointlist = []
endpoints = result.cssselect('li > a')
for endpoint in endpoints[1:]:
descriptor = endpoint.get('href')[1:].replace('_', ' /', 1).replace('_', '/')
endpointlist.append(descriptor)
sections[scope] = endpointlist
from pprint import pprint
pprint(sections)
return sections
if __name__ == "__main__":
json.dumps(EndpointIdentifier().find(), indent=4, sort_keys=True)
| #!/usr/bin/env python3
import lxml.html
from lxml.cssselect import CSSSelector
import requests
import json
class EndpointIdentifier:
    """Scrapes reddit's OAuth docs page and groups API endpoints by scope."""

    # Page whose table of contents is scraped for scope/endpoint data.
    _page = 'https://www.reddit.com/dev/api/oauth'
    # Label used for endpoints listed under an empty scope heading.
    _no_scope = '(any scope)'
    # Browser-like UA; presumably needed because reddit blocks default
    # library user agents -- confirm before removing.
    _headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'
    }

    def __init__(self):
        pass

    def find(self):
        """Return a dict mapping OAuth scope name -> list of endpoint strings."""
        page = requests.get(self._page, headers=self._headers)
        if page.status_code != 200:
            print("Bad status code:", page.status_code)
            from sys import exit
            exit(1)
        tree = lxml.html.fromstring(page.text)
        # Each second-level TOC entry corresponds to one OAuth scope.
        sel = CSSSelector('div[class="toc"] > ul > li > ul > li')
        results = sel(tree)
        sections = {}
        for result in results:
            scope = result.find('a').text_content()
            if not scope:
                scope = self._no_scope
            endpointlist = []
            endpoints = result.cssselect('li > a')
            # Skip endpoints[0] -- presumably the scope's own anchor.
            for endpoint in endpoints[1:]:
                # hrefs look like '#VERB_path_parts': drop the '#', turn the
                # first '_' into ' /' (verb/path split) and the rest into '/'.
                # NOTE(review): assumes no underscores within path segments.
                descriptor = endpoint.get('href')[1:].replace('_', ' /', 1).replace('_', '/')
                endpointlist.append(descriptor)
            sections[scope] = endpointlist
        return sections
# Entry point: dump the scraped scope -> endpoints mapping as pretty JSON.
if __name__ == "__main__":
    print(json.dumps(EndpointIdentifier().find(), indent=4, sort_keys=True))
| Add default headers, fix output | Add default headers, fix output
| Python | mit | thatJavaNerd/JRAW,ccrama/JRAW,fbis251/JRAW,fbis251/JRAW,fbis251/JRAW,thatJavaNerd/JRAW,Saketme/JRAW,hzsweers/JRAW,hzsweers/JRAW,ccrama/JRAW,thatJavaNerd/JRAW,ccrama/JRAW,Saketme/JRAW,hzsweers/JRAW,Saketme/JRAW | ---
+++
@@ -8,12 +8,15 @@
class EndpointIdentifier:
_page = 'https://www.reddit.com/dev/api/oauth'
_no_scope = '(any scope)'
+ _headers = {
+ 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'
+ }
def __init__(self):
pass
def find(self):
- page = requests.get(self._page)
+ page = requests.get(self._page, headers=self._headers)
if page.status_code != 200:
print("Bad status code:", page.status_code)
from sys import exit
@@ -37,10 +40,8 @@
endpointlist.append(descriptor)
sections[scope] = endpointlist
- from pprint import pprint
- pprint(sections)
return sections
if __name__ == "__main__":
- json.dumps(EndpointIdentifier().find(), indent=4, sort_keys=True)
+ print(json.dumps(EndpointIdentifier().find(), indent=4, sort_keys=True))
|
9cb851af00b0b41c31f216bd062bdeea6251860c | examples/list_people.py | examples/list_people.py | #! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates though all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #: ",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #: ",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #: ",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
| #! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates through all people in the AddressBook and prints info about them.
def ListPeople(address_book):
  for person in address_book.person:
    print "Person ID:", person.id
    print " Name:", person.name
    if person.HasField('email'):
      print " E-mail address:", person.email

    for phone_number in person.phone:
      if phone_number.type == addressbook_pb2.Person.MOBILE:
        # Trailing comma suppresses the newline so the number prints on
        # the same line as the label (Python 2 print statement).
        print " Mobile phone #:",
      elif phone_number.type == addressbook_pb2.Person.HOME:
        print " Home phone #:",
      elif phone_number.type == addressbook_pb2.Person.WORK:
        print " Work phone #:",
      print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
  print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
  sys.exit(-1)

address_book = addressbook_pb2.AddressBook()

# Read the existing address book.
# NOTE(review): binary mode ("rb") matters on Windows -- protobuf wire
# data is binary and a text-mode read would corrupt it.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()

ListPeople(address_book)
| Make Python example output identical to C++ and Java by removing redundant spaces. | Make Python example output identical to C++ and Java by removing redundant
spaces.
git-svn-id: 6df6fa3ddd728f578ea1442598151d5900a6ed44@65 630680e5-0e50-0410-840e-4b1c322b438d
| Python | bsd-3-clause | svn2github/protobuf-mirror,chandlerc/protobuf-llvm,lcy03406/protobuf,svn2github/google-protobuf,da2ce7/protobuf,beyang/protobuf,patrickhartling/protobuf,mkrautz/external-protobuf,mkrautz/external-protobuf,spilgames/protobuf,Distrotech/protobuf,miracle2k/protobuf,kastnerkyle/protobuf-py3,svn2github/protobuf-mirror,mpapierski/protobuf,miracle2k/protobuf,patrickhartling/protobuf,machinalis/protobuf-python3,lcy03406/protobuf,da2ce7/protobuf,lcy03406/protobuf,svn2github/protobuf-mirror,datacratic/protobuf,svn2github/google-protobuf,machinalis/protobuf-python3,beyang/protobuf,mikelikespie/protobuf,svn2github/protobuf-mirror,Distrotech/protobuf,mpapierski/protobuf,mkrautz/external-protobuf,spilgames/protobuf,Distrotech/protobuf,aidansteele/protobuf-mirror,svn2github/google-protobuf,datacratic/protobuf,mikelikespie/protobuf,miracle2k/protobuf,kastnerkyle/protobuf-py3,aidansteele/protobuf-mirror,da2ce7/protobuf,machinalis/protobuf-python3,datacratic/protobuf,patrickhartling/protobuf,aidansteele/protobuf-mirror,chandlerc/protobuf-llvm,GreatFruitOmsk/protobuf-py3,spilgames/protobuf,datacratic/protobuf,lcy03406/protobuf,GreatFruitOmsk/protobuf-py3,kastnerkyle/protobuf-py3,da2ce7/protobuf,beyang/protobuf,mpapierski/protobuf,chandlerc/protobuf-llvm,lcy03406/protobuf,miracle2k/protobuf,spilgames/protobuf,machinalis/protobuf-python3,mpapierski/protobuf,GreatFruitOmsk/protobuf-py3,datacratic/protobuf,beyang/protobuf,Distrotech/protobuf,chandlerc/protobuf-llvm,mpapierski/protobuf,mkrautz/external-protobuf,Distrotech/protobuf,spilgames/protobuf,svn2github/google-protobuf,mikelikespie/protobuf,aidansteele/protobuf-mirror,chandlerc/protobuf-llvm,kastnerkyle/protobuf-py3,kastnerkyle/protobuf-py3,patrickhartling/protobuf,aidansteele/protobuf-mirror,da2ce7/protobuf,svn2github/protobuf-mirror,patrickhartling/protobuf,GreatFruitOmsk/protobuf-py3,GreatFruitOmsk/protobuf-py3,mkrautz/external-protobuf,mikelikespie/protobuf,svn2github/google-protobuf,beyang/protobuf,mik
elikespie/protobuf | ---
+++
@@ -15,11 +15,11 @@
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
- print " Mobile phone #: ",
+ print " Mobile phone #:",
elif phone_number.type == addressbook_pb2.Person.HOME:
- print " Home phone #: ",
+ print " Home phone #:",
elif phone_number.type == addressbook_pb2.Person.WORK:
- print " Work phone #: ",
+ print " Work phone #:",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all |
4a92328908f38ec7cc6077289217a802283db03b | examples/list_people.py | examples/list_people.py | #! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates though all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #: ",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #: ",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #: ",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
| #! /usr/bin/python
# See README.txt for information and build instructions.
import addressbook_pb2
import sys
# Iterates though all people in the AddressBook and prints info about them.
def ListPeople(address_book):
for person in address_book.person:
print "Person ID:", person.id
print " Name:", person.name
if person.HasField('email'):
print " E-mail address:", person.email
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
print " Mobile phone #:",
elif phone_number.type == addressbook_pb2.Person.HOME:
print " Home phone #:",
elif phone_number.type == addressbook_pb2.Person.WORK:
print " Work phone #:",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all
# the information inside.
if len(sys.argv) != 2:
print "Usage:", sys.argv[0], "ADDRESS_BOOK_FILE"
sys.exit(-1)
address_book = addressbook_pb2.AddressBook()
# Read the existing address book.
f = open(sys.argv[1], "rb")
address_book.ParseFromString(f.read())
f.close()
ListPeople(address_book)
| Make Python example output identical to C++ and Java by removing redundant spaces. | Make Python example output identical to C++ and Java by removing redundant
spaces.
| Python | bsd-3-clause | LeslieW/protobuf,LeslieW/protobuf,LeslieW/protobuf,LeslieW/protobuf | ---
+++
@@ -15,11 +15,11 @@
for phone_number in person.phone:
if phone_number.type == addressbook_pb2.Person.MOBILE:
- print " Mobile phone #: ",
+ print " Mobile phone #:",
elif phone_number.type == addressbook_pb2.Person.HOME:
- print " Home phone #: ",
+ print " Home phone #:",
elif phone_number.type == addressbook_pb2.Person.WORK:
- print " Work phone #: ",
+ print " Work phone #:",
print phone_number.number
# Main procedure: Reads the entire address book from a file and prints all |
0a119e379150a8c1bbbd412d2b0734dd9748cf77 | windows_install/omni-config-setup.py | windows_install/omni-config-setup.py | from distutils.core import setup
import py2exe
import sys
setup(console=['..\src\omni-configure.py'],
name="omni-configure",
py_modules=['sfa','ConfigParser','logging','optparse',
'os','sys','string',
're','platform','shutil','zipfile','logging','subprocess'],
# options={
# 'py2exe':{
# 'includes':'sfa.trust.certificate,ConfigParser,logging,optparse\
# ,os,sys,string,re,platform,shutil,zipfile,logging'
# }
# }
)
| from distutils.core import setup
import py2exe
import sys
# py2exe build configuration: each entry in ``console`` is turned into a
# standalone console .exe; ``py_modules`` lists modules to bundle.
# NOTE(review): 'logging' appears twice in py_modules (harmless duplicate),
# and the console paths mix '\' and '/' separators -- confirm intentional.
setup(console=['..\src\omni-configure.py', '..\examples/readyToLogin.py', '..\src\clear-passphrases.py'],
      name="omni-configure",
      py_modules=['sfa','ConfigParser','logging','optparse',
                  'os','sys','string',
                  're','platform','shutil','zipfile','logging','subprocess'],
#      options={
#          'py2exe':{
#              'includes':'sfa.trust.certificate,ConfigParser,logging,optparse\
#              ,os,sys,string,re,platform,shutil,zipfile,logging'
#          }
#      }
      )
| Add readyToLogin.py and clear-passphrases.py to setup.py script. | Add readyToLogin.py and clear-passphrases.py to setup.py script.
| Python | mit | ahelsing/geni-tools,plantigrade/geni-tools,tcmitchell/geni-tools,tcmitchell/geni-tools,ahelsing/geni-tools,plantigrade/geni-tools | ---
+++
@@ -3,7 +3,7 @@
import py2exe
import sys
-setup(console=['..\src\omni-configure.py'],
+setup(console=['..\src\omni-configure.py', '..\examples/readyToLogin.py', '..\src\clear-passphrases.py'],
name="omni-configure",
py_modules=['sfa','ConfigParser','logging','optparse', |
812fd79675590659b3dc4251ed998f84c4bf2395 | utils.py | utils.py | import os
import sys
import hashlib
def e(s):
if type(s) == str:
return str
return s.encode('utf-8')
def d(s):
if type(s) == unicode:
return s
return unicode(s, 'utf-8')
def mkid(s):
return hashlib.sha1(e(s)).hexdigest()[:2*4]
def running_in_tools_labs():
return os.path.exists('/etc/wmflabs-project')
class Logger(object):
def __init__(self):
self._mode = 'INFO'
def progress(self, message):
message = e(message)
if not sys.stderr.isatty():
return
if self._mode == 'PROGRESS':
print >>sys.stderr, '\r',
print >>sys.stderr, message,
self._mode = 'PROGRESS'
def info(self, message):
message = e(message)
if self._mode == 'PROGRESS':
print >>sys.stderr
print >>sys.stderr, message
self._mode = 'INFO'
| import os
import sys
import hashlib
def e(s):
    """UTF-8-encode *s*; values that are already ``str`` pass through."""
    return s if type(s) == str else s.encode('utf-8')
def d(s):
    # Decode *s* to unicode (UTF-8); unicode values pass through untouched.
    # Python 2 only: relies on the `unicode` builtin.
    if type(s) == unicode:
        return s
    return unicode(s, 'utf-8')
def mkid(s):
    """Short stable id: the first 8 hex chars of SHA-1(UTF-8-encoded *s*)."""
    digest = hashlib.sha1(e(s)).hexdigest()
    return digest[:8]
def running_in_tools_labs():
    """Detect Tools Labs hosts via their /etc/wmflabs-project marker file."""
    marker = '/etc/wmflabs-project'
    return os.path.exists(marker)
class Logger(object):
    # Two-state stderr logger: PROGRESS lines overwrite each other on a TTY
    # via carriage returns, while INFO lines always get their own line.
    def __init__(self):
        # Current output mode: 'INFO' or 'PROGRESS'.
        self._mode = 'INFO'
    def progress(self, message):
        # Transient status line; skipped when stderr is not a TTY (e.g.
        # redirected to a file), where \r tricks would just add noise.
        message = e(message)
        if not sys.stderr.isatty():
            return
        if self._mode == 'PROGRESS':
            # Return to column 0 so this message overwrites the previous one.
            print >>sys.stderr, '\r',
        print >>sys.stderr, message,
        self._mode = 'PROGRESS'
    def info(self, message):
        # Permanent log line; first terminate any in-progress \r line.
        message = e(message)
        if self._mode == 'PROGRESS':
            print >>sys.stderr
        print >>sys.stderr, message
        self._mode = 'INFO'
| Fix string encoding when the argument is already a str(). | Fix string encoding when the argument is already a str().
| Python | mit | Stryn/citationhunt,Stryn/citationhunt,Stryn/citationhunt,Stryn/citationhunt,jhsoby/citationhunt,jhsoby/citationhunt,jhsoby/citationhunt,jhsoby/citationhunt | ---
+++
@@ -4,7 +4,7 @@
def e(s):
if type(s) == str:
- return str
+ return s
return s.encode('utf-8')
def d(s): |
02f5da04b6082462adf07d2c29dbb36d88197047 | ct/py/csv-django-settings.py | ct/py/csv-django-settings.py | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
TEMPLATE_DIRS = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'html-templates')
SECRET_KEY = '*lk^6@0l0(iulgar$j)fbvfy&^(^u+qk3j73d18@&+ur^xuTxY'
| #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
TEMPLATE_DIRS = (os.path.join(os.path.dirname(os.path.realpath(__file__)),
'html-templates'),)
SECRET_KEY = '*lk^6@0l0(iulgar$j)fbvfy&^(^u+qk3j73d18@&+ur^xuTxY'
| Make TEMPLATE_DIRS a tuple to make newer version of django happy | Make TEMPLATE_DIRS a tuple to make newer version of django happy
BUG=skia:3465
Review URL: https://codereview.chromium.org/968223003
| Python | bsd-3-clause | Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot | ---
+++
@@ -6,7 +6,7 @@
import os
-TEMPLATE_DIRS = os.path.join(os.path.dirname(os.path.realpath(__file__)),
- 'html-templates')
+TEMPLATE_DIRS = (os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ 'html-templates'),)
SECRET_KEY = '*lk^6@0l0(iulgar$j)fbvfy&^(^u+qk3j73d18@&+ur^xuTxY' |
8f14ff9ec7c011c2e08f5eba1e87f03fbfede24c | extensions/rules/code_evaluation.py | extensions/rules/code_evaluation.py | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, softwar
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Rules for CodeEvaluation objects."""
__author__ = 'Koji Ashida'
from extensions.rules import base
class OutputEquals(base.CodeEvaluationRule):
description = (
'has output equal to {{x|NormalizedString}} (collapsing spaces)')
is_generic = False
def _evaluate(self, subject):
normalized_result = ' '.join(subject['output'].split())
normalized_expected_output = ' '.join(self.x.split())
return normalized_result == normalized_expected_output
class ResultsInError(base.CodeEvaluationRule):
description = 'results in an error when run'
is_generic = False
def _evaluate(self, subject):
error = subject['error'].strip()
return bool(error)
| # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, softwar
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Rules for CodeEvaluation objects."""
__author__ = 'Koji Ashida'
from extensions.rules import base
class OutputEquals(base.CodeEvaluationRule):
description = (
'has output equal to {{x|UnicodeString}} (collapsing spaces)')
is_generic = False
def _evaluate(self, subject):
normalized_result = ' '.join(subject['output'].split())
normalized_expected_output = ' '.join(self.x.split())
return normalized_result == normalized_expected_output
class ResultsInError(base.CodeEvaluationRule):
description = 'results in an error when run'
is_generic = False
def _evaluate(self, subject):
error = subject['error'].strip()
return bool(error)
| Make the code evaluation rule use a UnicodeString rather than a NormalizedString, as part of the migration away from NormalizedStrings. | Make the code evaluation rule use a UnicodeString rather than a NormalizedString, as part of the migration away from NormalizedStrings.
| Python | apache-2.0 | fernandopinhati/oppia,danieljjh/oppia,Atlas-Sailed-Co/oppia,felipecocco/oppia,dippatel1994/oppia,sdulal/oppia,directorlive/oppia,virajprabhu/oppia,brylie/oppia,sbhowmik89/oppia,MAKOSCAFEE/oppia,himanshu-dixit/oppia,terrameijar/oppia,michaelWagner/oppia,gale320/oppia,sbhowmik89/oppia,kingctan/oppia,Dev4X/oppia,sunu/oppia,DewarM/oppia,wangsai/oppia,Cgruppo/oppia,raju249/oppia,rackstar17/oppia,infinyte/oppia,google-code-export/oppia,virajprabhu/oppia,leandrotoledo/oppia,danieljjh/oppia,sanyaade-teachings/oppia,BenHenning/oppia,whygee/oppia,sanyaade-teachings/oppia,oulan/oppia,prasanna08/oppia,fernandopinhati/oppia,sdulal/oppia,dippatel1994/oppia,google-code-export/oppia,Cgruppo/oppia,edallison/oppia,mit0110/oppia,oppia/oppia,kennho/oppia,zgchizi/oppia-uc,toooooper/oppia,cleophasmashiri/oppia,shaz13/oppia,brianrodri/oppia,toooooper/oppia,amgowano/oppia,brylie/oppia,brianrodri/oppia,amitdeutsch/oppia,CMDann/oppia,michaelWagner/oppia,wangsai/oppia,sarahfo/oppia,felipecocco/oppia,anggorodewanto/oppia,souravbadami/oppia,anthkris/oppia,MaximLich/oppia,amitdeutsch/oppia,jestapinski/oppia,toooooper/oppia,kevinlee12/oppia,himanshu-dixit/oppia,BenHenning/oppia,sarahfo/oppia,nagyistoce/oppia,BenHenning/oppia,nagyistoce/oppia,kevinlee12/oppia,won0089/oppia,anthkris/oppia,google-code-export/oppia,wangsai/oppia,fernandopinhati/oppia,wangsai/oppia,VictoriaRoux/oppia,dippatel1994/oppia,whygee/oppia,CMDann/oppia,kaffeel/oppia,kaffeel/oppia,michaelWagner/oppia,kingctan/oppia,sanyaade-teachings/oppia,terrameijar/oppia,kevinlee12/oppia,amgowano/oppia,souravbadami/oppia,DewarM/oppia,whygee/oppia,Dev4X/oppia,VictoriaRoux/oppia,rackstar17/oppia,raju249/oppia,won0089/oppia,felipecocco/oppia,rackstar17/oppia,bjvoth/oppia,leandrotoledo/oppia,danieljjh/oppia,sdulal/oppia,sdulal/oppia,Dev4X/oppia,oppia/oppia,toooooper/oppia,hazmatzo/oppia,himanshu-dixit/oppia,DewarM/oppia,mit0110/oppia,brianrodri/oppia,hazmatzo/oppia,BenHenning/oppia,souravbadami/oppia,amgowano/oppia,asand
yz/oppia,sunu/oppia,michaelWagner/oppia,sbhowmik89/oppia,google-code-export/oppia,kingctan/oppia,dippatel1994/oppia,won0089/oppia,oulan/oppia,jestapinski/oppia,gale320/oppia,oulan/oppia,souravbadami/oppia,won0089/oppia,amitdeutsch/oppia,MaximLich/oppia,terrameijar/oppia,CMDann/oppia,directorlive/oppia,jestapinski/oppia,danieljjh/oppia,brylie/oppia,DewarM/oppia,fernandopinhati/oppia,sanyaade-teachings/oppia,MaximLich/oppia,Atlas-Sailed-Co/oppia,shaz13/oppia,kevinlee12/oppia,terrameijar/oppia,souravbadami/oppia,oppia/oppia,edallison/oppia,edallison/oppia,virajprabhu/oppia,brylie/oppia,sarahfo/oppia,bjvoth/oppia,jestapinski/oppia,dippatel1994/oppia,nagyistoce/oppia,zgchizi/oppia-uc,bjvoth/oppia,fernandopinhati/oppia,leandrotoledo/oppia,leandrotoledo/oppia,anggorodewanto/oppia,kevinlee12/oppia,asandyz/oppia,danieljjh/oppia,DewarM/oppia,MAKOSCAFEE/oppia,virajprabhu/oppia,himanshu-dixit/oppia,directorlive/oppia,bjvoth/oppia,amitdeutsch/oppia,amgowano/oppia,whygee/oppia,Cgruppo/oppia,toooooper/oppia,anggorodewanto/oppia,amitdeutsch/oppia,CMDann/oppia,gale320/oppia,sarahfo/oppia,raju249/oppia,sdulal/oppia,asandyz/oppia,AllanYangZhou/oppia,VictoriaRoux/oppia,anthkris/oppia,brylie/oppia,Dev4X/oppia,sunu/oppia,VictoriaRoux/oppia,cleophasmashiri/oppia,Atlas-Sailed-Co/oppia,mit0110/oppia,AllanYangZhou/oppia,shaz13/oppia,kingctan/oppia,kaffeel/oppia,prasanna08/oppia,anggorodewanto/oppia,sarahfo/oppia,bjvoth/oppia,rackstar17/oppia,nagyistoce/oppia,prasanna08/oppia,hazmatzo/oppia,CMDann/oppia,VictoriaRoux/oppia,cleophasmashiri/oppia,sunu/oppia,prasanna08/oppia,nagyistoce/oppia,kingctan/oppia,sanyaade-teachings/oppia,sunu/oppia,AllanYangZhou/oppia,directorlive/oppia,cleophasmashiri/oppia,felipecocco/oppia,leandrotoledo/oppia,prasanna08/oppia,sbhowmik89/oppia,kennho/oppia,whygee/oppia,oulan/oppia,kennho/oppia,edallison/oppia,gale320/oppia,Cgruppo/oppia,brianrodri/oppia,MAKOSCAFEE/oppia,anthkris/oppia,hazmatzo/oppia,kennho/oppia,kaffeel/oppia,google-code-export/oppia,kaffeel/oppia,mit
0110/oppia,zgchizi/oppia-uc,asandyz/oppia,directorlive/oppia,Dev4X/oppia,infinyte/oppia,felipecocco/oppia,MAKOSCAFEE/oppia,kennho/oppia,cleophasmashiri/oppia,raju249/oppia,BenHenning/oppia,sbhowmik89/oppia,infinyte/oppia,hazmatzo/oppia,shaz13/oppia,brianrodri/oppia,wangsai/oppia,asandyz/oppia,infinyte/oppia,Atlas-Sailed-Co/oppia,Atlas-Sailed-Co/oppia,won0089/oppia,mit0110/oppia,oppia/oppia,virajprabhu/oppia,infinyte/oppia,oppia/oppia,Cgruppo/oppia,oulan/oppia,zgchizi/oppia-uc,michaelWagner/oppia,MaximLich/oppia,AllanYangZhou/oppia,gale320/oppia | ---
+++
@@ -23,7 +23,7 @@
class OutputEquals(base.CodeEvaluationRule):
description = (
- 'has output equal to {{x|NormalizedString}} (collapsing spaces)')
+ 'has output equal to {{x|UnicodeString}} (collapsing spaces)')
is_generic = False
def _evaluate(self, subject): |
2456af37c33cd1c7903c3641f8cc61550f6a113c | tests/app/test_application.py | tests/app/test_application.py | from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
def test_404(self):
response = self.client.get('/not-found')
assert 404 == response.status_code
| from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 401 == response.status_code
def test_404(self):
response = self.client.get('/not-found')
assert 404 == response.status_code
| Make the tests expect authentication | Make the tests expect authentication
| Python | mit | alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend | ---
+++
@@ -4,7 +4,7 @@
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
- assert 200 == response.status_code
+ assert 401 == response.status_code
def test_404(self):
response = self.client.get('/not-found') |
088e30bd675e2102f62493fd295808a7a48ae615 | project/functions/main.py | project/functions/main.py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/env python3
"""
This is the entry point for the Cloud function for getting context data
on new investments.
"""
import base64, json
from google.cloud import datastore
from fetch_trends import get_updated_daily_data
from database_updates import update_investment_database
def update(event, context):
eventdata = event["data"]
decoded = base64.b64decode(eventdata)
data = json.loads(decoded)
start_date = int(data['date'])
google_search = data['search']
get_context(start_date, google_search)
def get_context(start_date, google_search):
# Instantiates a client
datastore_client = datastore.Client()
# Retrieve up to date trends data for each search term
daily_data = get_updated_daily_data(google_search, start_date)
# Add up to date data do datastore
update_investment_database(daily_data, datastore_client)
| import base64, json
from google.cloud import datastore
from fetch_trends import get_updated_daily_data
from database_updates import update_investment_database
def update(event, context):
eventdata = event["data"]
decoded = base64.b64decode(eventdata)
data = json.loads(decoded)
start_date = int(data['date'])
google_search = data['search']
entity = {
"initial_date" : start_date,
"search_term" : google_search
}
get_context(entity)
def get_context(entity):
print("Started running function")
# Instantiates a client
datastore_client = datastore.Client()
# Retrieve up to date trends data for each search term
daily_data = get_updated_daily_data(entity)
# Add up to date data do datastore
update_investment_database(daily_data, datastore_client)
| Update entry point for cloud function data fetch | Update entry point for cloud function data fetch
| Python | apache-2.0 | googleinterns/sgonks,googleinterns/sgonks,googleinterns/sgonks,googleinterns/sgonks | ---
+++
@@ -1,24 +1,3 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#!/usr/bin/env python3
-
-"""
-This is the entry point for the Cloud function for getting context data
-on new investments.
-"""
-
import base64, json
from google.cloud import datastore
from fetch_trends import get_updated_daily_data
@@ -31,13 +10,21 @@
start_date = int(data['date'])
google_search = data['search']
- get_context(start_date, google_search)
+
+ entity = {
+ "initial_date" : start_date,
+ "search_term" : google_search
+ }
+ get_context(entity)
-def get_context(start_date, google_search):
+def get_context(entity):
+ print("Started running function")
# Instantiates a client
datastore_client = datastore.Client()
# Retrieve up to date trends data for each search term
- daily_data = get_updated_daily_data(google_search, start_date)
+ daily_data = get_updated_daily_data(entity)
+
# Add up to date data do datastore
update_investment_database(daily_data, datastore_client)
+ |
e229779753f3c5f44319d882d19feab324abe119 | api/migrations/0011_user_preferences_update_troposphere_user.py | api/migrations/0011_user_preferences_update_troposphere_user.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-09-28 19:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0010_sitemetadata_site_footer_link'),
]
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''',
# reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED'''
# ),
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''',
# reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id'''
# ),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-09-28 19:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0010_sitemetadata_site_footer_link'),
]
# These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is.
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''',
# reverse_sql='''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES auth_user(id) DEFERRABLE INITIALLY DEFERRED'''
# ),
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences ADD CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id
# FOREIGN KEY (user_id) REFERENCES troposphere_user(id) DEFERRABLE INITIALLY DEFERRED''',
# reverse_sql='''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id'''
# ),
]
| Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream) | Add note after testing on all three 'valid' environment formats: (Clean, CyVerse, Jetstream)
| Python | apache-2.0 | CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend | ---
+++
@@ -11,6 +11,7 @@
('api', '0010_sitemetadata_site_footer_link'),
]
+ # These one-off operations are no longer necessary. The file was already executed in a production environment so it will stay as-is.
operations = [
# migrations.RunSQL(
# '''ALTER TABLE api_userpreferences DROP CONSTRAINT api_userpreferences_user_id_6af783c3068af7fc_fk_auth_user_id''', |
27844c3a386616834f92cd34f4790b3117e7ac05 | test_utils/templatetags/utils.py | test_utils/templatetags/utils.py | from django import template
register = template.Library()
def parse_ttag(token):
bits = token.split_contents()
tags = {}
possible_tags = ['as', 'for', 'limit', 'exclude']
for index, bit in enumerate(bits):
if bit.strip() in possible_tags:
tags[bit.strip()] = bits[index+1]
return tags
def context_for_object(token, Node):
"""This is a function that returns a Node.
It takes a string from a template tag in the format
TagName for [object] as [context variable]
"""
tags = parse_ttag(token)
if len(tags) == 2:
return Node(tags['for'], tags['as'])
elif len(tags) == 1:
return Node(tags['for'])
else:
#raise template.TemplateSyntaxError, "%s: Fail" % bits[]
print "ERROR"
| from django import template
def parse_ttag(token, possible_tags=['as', 'for', 'limit', 'exclude']):
"""
A function to parse a template tag.
Pass in the token to parse, and a list of keywords to look for.
It sets the name of the tag to 'tag_name' in the hash returned.
Default list of keywords is::
['as', 'for', 'limit', 'exclude']
>>> from django.template import Token, TOKEN_TEXT
>>> from test_utils.templatetags.utils import parse_ttag
>>> parse_ttag('super_cool_tag for my_object as bob', ['as'])
{'tag_name': u'super_cool_tag', u'as': u'bob'}
>>> parse_ttag('super_cool_tag for my_object as bob', ['as', 'for'])
{'tag_name': u'super_cool_tag', u'as': u'bob', u'for': u'my_object'}
"""
if isinstance(token, template.Token):
bits = token.split_contents()
else:
bits = token.split(' ')
tags = {'tag_name': bits.pop(0)}
for index, bit in enumerate(bits):
if bit.strip() in possible_tags:
if len(bits) != index-1:
tags[bit.strip()] = bits[index+1]
return tags
def context_for_object(token, Node):
"""
Example Usage
This is a function that returns a Node.
It takes a string from a template tag in the format
TagName for [object] as [context variable]
"""
tags = parse_ttag(token, ['for', 'as'])
if len(tags) == 2:
return Node(tags['for'], tags['as'])
elif len(tags) == 1:
return Node(tags['for'])
else:
#raise template.TemplateSyntaxError, "%s: Fail" % bits[]
print "ERROR"
| Add some hotness to the parse_ttags function. Now allows you to pass a custom list of things to parse, much nicer. | Add some hotness to the parse_ttags function. Now allows you to pass a custom list of things to parse, much nicer. | Python | mit | frac/django-test-utils,ericholscher/django-test-utils,frac/django-test-utils,acdha/django-test-utils,ericholscher/django-test-utils,acdha/django-test-utils | ---
+++
@@ -1,22 +1,44 @@
-from django import template
+from django import template
-register = template.Library()
+def parse_ttag(token, possible_tags=['as', 'for', 'limit', 'exclude']):
+ """
+ A function to parse a template tag.
+ Pass in the token to parse, and a list of keywords to look for.
-def parse_ttag(token):
- bits = token.split_contents()
- tags = {}
- possible_tags = ['as', 'for', 'limit', 'exclude']
+ It sets the name of the tag to 'tag_name' in the hash returned.
+
+ Default list of keywords is::
+ ['as', 'for', 'limit', 'exclude']
+
+ >>> from django.template import Token, TOKEN_TEXT
+ >>> from test_utils.templatetags.utils import parse_ttag
+ >>> parse_ttag('super_cool_tag for my_object as bob', ['as'])
+ {'tag_name': u'super_cool_tag', u'as': u'bob'}
+ >>> parse_ttag('super_cool_tag for my_object as bob', ['as', 'for'])
+ {'tag_name': u'super_cool_tag', u'as': u'bob', u'for': u'my_object'}
+
+ """
+
+ if isinstance(token, template.Token):
+ bits = token.split_contents()
+ else:
+ bits = token.split(' ')
+ tags = {'tag_name': bits.pop(0)}
for index, bit in enumerate(bits):
if bit.strip() in possible_tags:
- tags[bit.strip()] = bits[index+1]
+ if len(bits) != index-1:
+ tags[bit.strip()] = bits[index+1]
return tags
def context_for_object(token, Node):
- """This is a function that returns a Node.
+ """
+ Example Usage
+
+ This is a function that returns a Node.
It takes a string from a template tag in the format
TagName for [object] as [context variable]
"""
- tags = parse_ttag(token)
+ tags = parse_ttag(token, ['for', 'as'])
if len(tags) == 2:
return Node(tags['for'], tags['as'])
elif len(tags) == 1:
@@ -24,4 +46,3 @@
else:
#raise template.TemplateSyntaxError, "%s: Fail" % bits[]
print "ERROR"
- |
fd6f45aa96599a90d9ece06ffa71d8612f9f64a7 | run_tests.py | run_tests.py | #!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names_prefix = 'tests.tests' if django.VERSION >= (1, 6) else 'tests'
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if names and re.search(r'^\d+$', names):
names = names_prefix + '.IssueTests.test_' + names
elif names and not names.startswith('tests.'):
names = names_prefix + '.' + names
else:
names = names_prefix
if hasattr(django, 'setup'):
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
if django.VERSION >= (1, 7):
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
| #!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names_prefix = 'tests.tests' if django.VERSION >= (1, 6) else 'tests'
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if names and re.search(r'^\d+$', names):
names = names_prefix + '.IssueTests.test_' + names
elif names and not names.startswith('tests.'):
names = names_prefix + '.' + names
else:
names = names_prefix
if hasattr(django, 'setup'):
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
if django.VERSION >= (1, 7):
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
| Make makemigrations verbose in ./runtests.py -v | Make makemigrations verbose in ./runtests.py -v
| Python | bsd-3-clause | whyflyru/django-cacheops,Suor/django-cacheops,andwun/django-cacheops,rutube/django-cacheops,bourivouh/django-cacheops,ErwinJunge/django-cacheops,LPgenerator/django-cacheops | ---
+++
@@ -23,7 +23,7 @@
try:
if django.VERSION >= (1, 7):
shutil.rmtree('tests/migrations', True)
- call_command('makemigrations', 'tests', verbosity=0)
+ call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations') |
682ca50c6eb01ce39937fcceee6f0ae010055663 | employees/views.py | employees/views.py | from .models import Employee, BioData, Payroll
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
def list(request):
employees = Employee.objects.all()[:20]
return render(request, 'employees/list.html', {'employees': employees})
def filtering(request):
employees = Employee.objects.all()
return render(request, 'employees/filtering.html', {'employees': employees})
| from .models import Employee, BioData, Payroll
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
def list(request):
employees = Employee.objects.all()[:50]
return render(request, 'employees/list.html', {'employees': employees})
def filtering(request):
employees = Employee.objects.all()
return render(request, 'employees/filtering.html', {'employees': employees})
| Increase the list to query 50 employees. | Increase the list to query 50 employees.
| Python | mit | newrelic/newrelic-python-kata,tebriel/newrelic-python-kata,tebriel/newrelic-python-kata,tebriel/newrelic-python-kata,newrelic/newrelic-python-kata,newrelic/newrelic-python-kata | ---
+++
@@ -3,7 +3,7 @@
from django.shortcuts import render, get_object_or_404
def list(request):
- employees = Employee.objects.all()[:20]
+ employees = Employee.objects.all()[:50]
return render(request, 'employees/list.html', {'employees': employees})
def filtering(request): |
1c32ccaf09db53192f5341ca5b0c443faf1a0ee6 | pox.py | pox.py | #!/usr/bin/python
from pox.core import core
import pox.openflow.openflow
import pox.topology.topology
import pox.openflow.of_01
import pox.dumb_l3_switch.dumb_l3_switch
# Set default log level
import logging
logging.basicConfig(level=logging.DEBUG)
# Turn on extra info for event exceptions
import pox.lib.revent.revent as revent
revent.showEventExceptions = True
def startup ():
core.register("topology", pox.topology.topology.Topology())
core.register("openflow", pox.openflow.openflow.OpenFlowHub())
core.register("switch", pox.dumb_l3_switch.dumb_l3_switch.dumb_l3_switch())
pox.openflow.of_01.start()
if __name__ == '__main__':
try:
startup()
core.goUp()
except:
import traceback
traceback.print_exc()
import code
code.interact('Ready.', local=locals())
pox.core.core.quit()
| #!/usr/bin/python
# Set default log level
import logging
logging.basicConfig(level=logging.DEBUG)
from pox.core import core
import pox.openflow.openflow
import pox.topology.topology
import pox.openflow.of_01
import pox.dumb_l3_switch.dumb_l3_switch
# Turn on extra info for event exceptions
import pox.lib.revent.revent as revent
revent.showEventExceptions = True
def startup ():
core.register("topology", pox.topology.topology.Topology())
core.register("openflow", pox.openflow.openflow.OpenFlowHub())
core.register("switch", pox.dumb_l3_switch.dumb_l3_switch.dumb_l3_switch())
pox.openflow.of_01.start()
if __name__ == '__main__':
try:
startup()
core.goUp()
except:
import traceback
traceback.print_exc()
import code
code.interact('Ready.', local=locals())
pox.core.core.quit()
| Initialize logging very first thing | Initialize logging very first thing
| Python | apache-2.0 | kulawczukmarcin/mypox,adusia/pox,pthien92/sdn,xAKLx/pox,xAKLx/pox,carlye566/IoT-POX,diogommartins/pox,waltznetworks/pox,denovogroup/pox,kpengboy/pox-exercise,jacobq/csci5221-viro-project,andiwundsam/_of_normalize,chenyuntc/pox,kpengboy/pox-exercise,kulawczukmarcin/mypox,kulawczukmarcin/mypox,kavitshah8/SDNDeveloper,kavitshah8/SDNDeveloper,pthien92/sdn,adusia/pox,VamsikrishnaNallabothu/pox,denovogroup/pox,carlye566/IoT-POX,MurphyMc/pox,jacobq/csci5221-viro-project,pthien92/sdn,chenyuntc/pox,VamsikrishnaNallabothu/pox,PrincetonUniversity/pox,waltznetworks/pox,diogommartins/pox,adusia/pox,PrincetonUniversity/pox,VamsikrishnaNallabothu/pox,noxrepo/pox,jacobq/csci5221-viro-project,denovogroup/pox,carlye566/IoT-POX,kpengboy/pox-exercise,waltznetworks/pox,carlye566/IoT-POX,adusia/pox,carlye566/IoT-POX,andiwundsam/_of_normalize,kulawczukmarcin/mypox,waltznetworks/pox,PrincetonUniversity/pox,denovogroup/pox,diogommartins/pox,jacobq/csci5221-viro-project,adusia/pox,kavitshah8/SDNDeveloper,PrincetonUniversity/pox,VamsikrishnaNallabothu/pox,kpengboy/pox-exercise,waltznetworks/pox,xAKLx/pox,diogommartins/pox,chenyuntc/pox,xAKLx/pox,MurphyMc/pox,denovogroup/pox,andiwundsam/_of_normalize,PrincetonUniversity/pox,xAKLx/pox,MurphyMc/pox,andiwundsam/_of_normalize,pthien92/sdn,pthien92/sdn,diogommartins/pox,kavitshah8/SDNDeveloper,jacobq/csci5221-viro-project,MurphyMc/pox,MurphyMc/pox,kulawczukmarcin/mypox,chenyuntc/pox,chenyuntc/pox,noxrepo/pox,noxrepo/pox,noxrepo/pox,kpengboy/pox-exercise,VamsikrishnaNallabothu/pox | ---
+++
@@ -1,14 +1,14 @@
#!/usr/bin/python
+
+# Set default log level
+import logging
+logging.basicConfig(level=logging.DEBUG)
from pox.core import core
import pox.openflow.openflow
import pox.topology.topology
import pox.openflow.of_01
import pox.dumb_l3_switch.dumb_l3_switch
-
-# Set default log level
-import logging
-logging.basicConfig(level=logging.DEBUG)
# Turn on extra info for event exceptions
import pox.lib.revent.revent as revent |
4e2fd5e78922eea0f6a65afd6d50ed4b0f03448c | tests/blueprints/board/topic_moderation_base.py | tests/blueprints/board/topic_moderation_base.py | """
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.board.models.topic import Topic
from testfixtures.board import create_board, create_category, create_topic
from tests.base import AbstractAppTestCase
from tests.helpers import assign_permissions_to_user
class AbstractTopicModerationTest(AbstractAppTestCase):
def setUp(self):
super().setUp()
self.admin = self.create_user('Admin')
self.create_session_token(self.admin.id)
self.user = self.create_user('User')
self.create_brand_and_party()
self.board = self.create_board()
# -------------------------------------------------------------------- #
# helpers
def setup_admin_with_permission(self, permission_id):
permission_ids = {'admin.access', permission_id}
assign_permissions_to_user(self.admin.id, 'admin', permission_ids)
def create_board(self):
board_id = self.brand.id
return create_board(self.brand.id, board_id)
def create_category(self, number):
return create_category(self.board.id, number=number)
def create_topic(self, category_id, creator_id, number):
return create_topic(category_id, creator_id, number=number)
def find_topic(self, id):
return Topic.query.get(id)
| """
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.board.models.topic import Topic
from byceps.services.party import settings_service as party_settings_service
from testfixtures.board import create_board, create_category, create_topic
from tests.base import AbstractAppTestCase
from tests.helpers import assign_permissions_to_user
class AbstractTopicModerationTest(AbstractAppTestCase):
def setUp(self):
super().setUp()
self.admin = self.create_user('Admin')
self.create_session_token(self.admin.id)
self.user = self.create_user('User')
self.create_brand_and_party()
self.board = self.create_board()
party_settings_service.create_setting(self.party.id, 'board_id',
self.board.id)
# -------------------------------------------------------------------- #
# helpers
def setup_admin_with_permission(self, permission_id):
permission_ids = {'admin.access', permission_id}
assign_permissions_to_user(self.admin.id, 'admin', permission_ids)
def create_board(self):
board_id = self.brand.id
return create_board(self.brand.id, board_id)
def create_category(self, number):
return create_category(self.board.id, number=number)
def create_topic(self, category_id, creator_id, number):
return create_topic(category_id, creator_id, number=number)
def find_topic(self, id):
return Topic.query.get(id)
| Configure board ID as party setting for board tests | Configure board ID as party setting for board tests
| Python | bsd-3-clause | homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps | ---
+++
@@ -4,6 +4,7 @@
"""
from byceps.services.board.models.topic import Topic
+from byceps.services.party import settings_service as party_settings_service
from testfixtures.board import create_board, create_category, create_topic
@@ -25,6 +26,9 @@
self.board = self.create_board()
+ party_settings_service.create_setting(self.party.id, 'board_id',
+ self.board.id)
+
# -------------------------------------------------------------------- #
# helpers
|
06d3aeda83e54edf7a2b972b9b44ae9bfa7f7178 | icekit/project/hosts.py | icekit/project/hosts.py | from django_hosts import patterns, host
host_patterns = patterns(
'',
host(r'www', 'icekit.project.urls', name='www'),
host(r'api', 'icekit.api.urls', name='api'),
)
| from django_hosts import patterns, host
from django.conf import settings
host_patterns = patterns(
'',
host(r'www', settings.ROOT_URLCONF, name='www'),
host(r'api', 'icekit.api.urls', name='api'),
)
| Use project ROOT_URLCONF on default host, not hardcoded urls. | Use project ROOT_URLCONF on default host, not hardcoded urls.
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | ---
+++
@@ -1,8 +1,8 @@
from django_hosts import patterns, host
-
+from django.conf import settings
host_patterns = patterns(
'',
- host(r'www', 'icekit.project.urls', name='www'),
+ host(r'www', settings.ROOT_URLCONF, name='www'),
host(r'api', 'icekit.api.urls', name='api'),
) |
6ee7d39c7c39a018a71cbb028dd847e3da521263 | views.py | views.py | from rest_framework import viewsets, permissions
from rest_framework_word_filter import FullWordSearchFilter
from quotedb.models import Quote
from quotedb.permissions import IsOwnerOrReadOnly
from quotedb.serializers import QuoteSerializer
class QuoteViewSet(viewsets.ModelViewSet):
"""
This viewset automatically provides `list`, `create`, `retrieve`,
`update` and `destroy` actions.
Additionally we also provide an extra `highlight` action.
"""
queryset = Quote.objects.all()
serializer_class = QuoteSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,
IsOwnerOrReadOnly,)
filter_backends = (FullWordSearchFilter,)
word_fields = ('body',)
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
| from rest_framework import viewsets, permissions
from rest_framework.decorators import list_route
from rest_framework.response import Response
from rest_framework_word_filter import FullWordSearchFilter
from quotedb.models import Quote
from quotedb.permissions import IsOwnerOrReadOnly
from quotedb.serializers import QuoteSerializer
class QuoteViewSet(viewsets.ModelViewSet):
"""
This viewset automatically provides `list`, `create`, `retrieve`,
`update` and `destroy` actions.
Additionally we also provide an extra `highlight` action.
"""
queryset = Quote.objects.all()
serializer_class = QuoteSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,
IsOwnerOrReadOnly,)
filter_backends = (FullWordSearchFilter,)
word_fields = ('body',)
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
@list_route()
def random(self, request):
queryset = Quote.objects.order_by('?').first()
serializer = self.serializer_class(queryset)
return Response(serializer.data)
| Add route to get random quote | Add route to get random quote
| Python | mit | kfdm/django-qdb,kfdm/django-qdb | ---
+++
@@ -1,5 +1,6 @@
from rest_framework import viewsets, permissions
-
+from rest_framework.decorators import list_route
+from rest_framework.response import Response
from rest_framework_word_filter import FullWordSearchFilter
from quotedb.models import Quote
@@ -23,3 +24,9 @@
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
+
+ @list_route()
+ def random(self, request):
+ queryset = Quote.objects.order_by('?').first()
+ serializer = self.serializer_class(queryset)
+ return Response(serializer.data) |
4208538a2b7c5f2280f67520a73bd87b74de26dd | scripts/getsent.py | scripts/getsent.py | #!/usr/bin/python
import sys
import depio
sentnum = int(sys.argv[2])
fnames = [sys.argv[1]]
for fname in fnames:
sents = list(depio.depread(fname))
i=0
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
break
out.close()
| #!/usr/bin/python
import sys
import depio
sentnum = int(sys.argv[2])
fnames = [sys.argv[1]]
for fname in fnames:
sents = list(depio.depread(fname))
i=0
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
out.write('\n')
out.close()
| Fix script to output new line at end of file | Fix script to output new line at end of file
| Python | apache-2.0 | habeanf/yap,habeanf/yap | ---
+++
@@ -11,5 +11,5 @@
out = open("%d.%s" % (sentnum,fname),'w')
for outl in sents[sentnum]:
out.write('\t'.join(outl) + '\n')
- break
+ out.write('\n')
out.close() |
78e7fd90db429793c2b4ceee34f5296484bb0fd4 | run_tests.py | run_tests.py | #!/usr/bin/env python
# This file is closely based on tests.py from matplotlib
#
# This allows running the matplotlib tests from the command line: e.g.
#
# $ python tests.py -v -d
#
# The arguments are identical to the arguments accepted by nosetests.
#
# See https://nose.readthedocs.org/ for a detailed description of
# these options.
import nose
# from skxray.testing.noseclasses import KnownFailure
# plugins = [KnownFailure]
plugins = []
env = {"NOSE_WITH_COVERAGE": 1,
'NOSE_COVER_PACKAGE': 'metadatastore',
'NOSE_COVER_HTML': 1}
# Nose doesn't automatically instantiate all of the plugins in the
# child processes, so we have to provide the multiprocess plugin with
# a list.
from nose.plugins import multiprocess
multiprocess._instantiate_plugins = plugins
def run():
nose.main(addplugins=[x() for x in plugins], env=env)
if __name__ == '__main__':
run()
| #!/usr/bin/env python
import sys
import pytest
if __name__ == '__main__':
# show output results from every test function
args = ['-v']
# show the message output for skipped and expected failure tests
args.append('-rxs')
args.extend(sys.argv[1:])
# call pytest and exit with the return code from pytest so that
# travis will fail correctly if tests fail
sys.exit(pytest.main(args))
| Use pytest to run tests | TST: Use pytest to run tests
| Python | bsd-3-clause | NSLS-II/metadatastore,hhslepicka/metadatastore,arkilic/metadatastore,arkilic/metadatastore,ericdill/databroker,NSLS-II/metadatastore,ericdill/databroker,tacaswell/metadataStore,tacaswell/metadataStore,hhslepicka/metadatastore | ---
+++
@@ -1,35 +1,13 @@
#!/usr/bin/env python
-# This file is closely based on tests.py from matplotlib
-#
-# This allows running the matplotlib tests from the command line: e.g.
-#
-# $ python tests.py -v -d
-#
-# The arguments are identical to the arguments accepted by nosetests.
-#
-# See https://nose.readthedocs.org/ for a detailed description of
-# these options.
-
-
-import nose
-# from skxray.testing.noseclasses import KnownFailure
-
-# plugins = [KnownFailure]
-plugins = []
-env = {"NOSE_WITH_COVERAGE": 1,
- 'NOSE_COVER_PACKAGE': 'metadatastore',
- 'NOSE_COVER_HTML': 1}
-# Nose doesn't automatically instantiate all of the plugins in the
-# child processes, so we have to provide the multiprocess plugin with
-# a list.
-from nose.plugins import multiprocess
-multiprocess._instantiate_plugins = plugins
-
-
-def run():
-
- nose.main(addplugins=[x() for x in plugins], env=env)
-
+import sys
+import pytest
if __name__ == '__main__':
- run()
+ # show output results from every test function
+ args = ['-v']
+ # show the message output for skipped and expected failure tests
+ args.append('-rxs')
+ args.extend(sys.argv[1:])
+ # call pytest and exit with the return code from pytest so that
+ # travis will fail correctly if tests fail
+ sys.exit(pytest.main(args)) |
e6206c2bdacfbd2632beb6ed56ccb6856d299e08 | tests/test_suggestion_fetcher.py | tests/test_suggestion_fetcher.py | import unittest2
from google.appengine.ext import testbed
from models.account import Account
from models.suggestion import Suggestion
from helpers.suggestions.suggestion_fetcher import SuggestionFetcher
class TestEventTeamRepairer(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
account = Account.get_or_insert(
"123",
email="user@example.com",
registered=True).put()
suggestion = Suggestion(
author=account,
review_state=Suggestion.REVIEW_PENDING,
target_key="2012cmp",
target_model="event").put()
def testCount(self):
self.assertEqual(SuggestionFetcher.count(Suggestion.REVIEW_PENDING, "event"), 1)
self.assertEqual(SuggestionFetcher.count(Suggestion.REVIEW_PENDING, "media"), 0)
| import unittest2
from google.appengine.ext import testbed
from models.account import Account
from models.suggestion import Suggestion
from helpers.suggestions.suggestion_fetcher import SuggestionFetcher
class TestSuggestionFetcher(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
account = Account.get_or_insert(
"123",
email="user@example.com",
registered=True).put()
suggestion = Suggestion(
author=account,
review_state=Suggestion.REVIEW_PENDING,
target_key="2012cmp",
target_model="event").put()
def testCount(self):
self.assertEqual(SuggestionFetcher.count(Suggestion.REVIEW_PENDING, "event"), 1)
self.assertEqual(SuggestionFetcher.count(Suggestion.REVIEW_PENDING, "media"), 0)
| Fix class name of test. | Fix class name of test.
| Python | mit | phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance | ---
+++
@@ -8,7 +8,7 @@
from helpers.suggestions.suggestion_fetcher import SuggestionFetcher
-class TestEventTeamRepairer(unittest2.TestCase):
+class TestSuggestionFetcher(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate() |
c6e1774c8c8d9b41509e183df2177f32427274df | tests/unit/test_LambdaContext.py | tests/unit/test_LambdaContext.py | from aws_lambda.helpers import LambdaContext
import time
import unittest
class TestLambdaContext(unittest.TestCase):
def test_get_remaining_time_in_millis(self):
context = LambdaContext('function_name',2000)
time.sleep(.5)
self.assertTrue(context.get_remaining_time_in_millis() < 2000)
if __name__ == '__main__':
unittest.main() | from aws_lambda.helpers import LambdaContext
import time
import unittest
class TestLambdaContext(unittest.TestCase):
def test_get_remaining_time_in_millis(self):
context = LambdaContext('function_name', 2000)
time.sleep(.5)
self.assertTrue(context.get_remaining_time_in_millis() < 2000000)
if __name__ == '__main__':
unittest.main() | Fix second/millisecond conversion issue in test case | Fix second/millisecond conversion issue in test case
The test for the `get_remaining_time_in_millis` method was
evaluating the output against a count in seconds, not milliseconds.
This corrects the issue to allow the test suite to pass
| Python | isc | nficano/python-lambda | ---
+++
@@ -5,9 +5,9 @@
class TestLambdaContext(unittest.TestCase):
def test_get_remaining_time_in_millis(self):
- context = LambdaContext('function_name',2000)
+ context = LambdaContext('function_name', 2000)
time.sleep(.5)
- self.assertTrue(context.get_remaining_time_in_millis() < 2000)
+ self.assertTrue(context.get_remaining_time_in_millis() < 2000000)
if __name__ == '__main__': |
fdbac9531215233b3ed5424401779c8edc33f16b | touchtechnology/common/fields.py | touchtechnology/common/fields.py | from django import forms
from django.utils.translation import ugettext_lazy as _
def boolean_coerce(value):
if value in {1, '1'}:
return True
if value in {0, '0'}:
return False
class BooleanChoiceField(forms.TypedChoiceField):
widget = forms.RadioSelect
def __init__(self, *args, **kwargs):
defaults = {
'choices': [
('1', _('Yes')),
('0', _('No')),
],
'coerce': boolean_coerce,
'required': True,
}
defaults.update(kwargs)
super(BooleanChoiceField, self).__init__(*args, **defaults)
def prepare_value(self, value):
if value is not None:
return str(int(value))
| from django import forms
from django.utils.translation import ugettext_lazy as _
def boolean_coerce(value):
if value in {1, '1'}:
return True
if value in {0, '0'}:
return False
class BooleanChoiceField(forms.TypedChoiceField):
widget = forms.Select
def __init__(self, *args, **kwargs):
defaults = {
'choices': [
('1', _('Yes')),
('0', _('No')),
],
'coerce': boolean_coerce,
'required': True,
}
defaults.update(kwargs)
super(BooleanChoiceField, self).__init__(*args, **defaults)
def prepare_value(self, value):
if value is not None:
return str(int(value))
| Switch BooleanChoiceField to use Select widget instead of the RadioSelect widget | Switch BooleanChoiceField to use Select widget instead of the RadioSelect widget
| Python | bsd-3-clause | goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic | ---
+++
@@ -10,7 +10,7 @@
class BooleanChoiceField(forms.TypedChoiceField):
- widget = forms.RadioSelect
+ widget = forms.Select
def __init__(self, *args, **kwargs):
defaults = { |
d65643e1bb74210a458b370aca5343f5c7059022 | wm_metrics/period.py | wm_metrics/period.py | """Representation of a period of time."""
class Period(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __repr__(self):
return "%s-%s" % (self.start, self.end)
| """Representation of a period of time."""
class Period(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __repr__(self):
return "%s-%s" % (self.start, self.end)
def __eq__(self, other):
return ((other.start == self.start) and
(other.end == self.end))
| Add __eq__ method to Period object | Add __eq__ method to Period object
Ultimately we probably want to reuse Python objects
like timestamps.
| Python | mit | Commonists/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics | ---
+++
@@ -9,3 +9,7 @@
def __repr__(self):
return "%s-%s" % (self.start, self.end)
+
+ def __eq__(self, other):
+ return ((other.start == self.start) and
+ (other.end == self.end)) |
f3a8c5504d75bde7fe66aeb736f4d45aa24bf6f7 | workbench/request.py | workbench/request.py | """Helpers for WebOb requests and responses."""
import webob
def webob_to_django_response(webob_response):
"""Returns a django response to the `webob_response`"""
from django.http import HttpResponse
django_response = HttpResponse(
webob_response.app_iter,
content_type=webob_response.content_type
)
for name, value in webob_response.headerlist:
django_response[name] = value
return django_response
def django_to_webob_request(django_request):
"""Returns a WebOb request to the `django_request`"""
environ = {}
environ.update(django_request.META)
webob_request = webob.Request(django_request.META)
webob_request.body = django_request.body
return webob_request
def requests_to_webob_response(requests):
"""Converts multiple django requests to a WebOb response."""
response = webob.Response()
response.status = requests.status_code
response.body = requests.content
for hname, hvalue in requests.headers.iteritems():
response.headers[hname] = hvalue
return response
| """Helpers for WebOb requests and responses."""
import webob
def webob_to_django_response(webob_response):
"""Returns a django response to the `webob_response`"""
from django.http import HttpResponse
django_response = HttpResponse(
webob_response.app_iter,
content_type=webob_response.content_type
)
for name, value in webob_response.headerlist:
django_response[name] = value
return django_response
def django_to_webob_request(django_request):
"""Returns a WebOb request to the `django_request`"""
environ = {}
environ.update(django_request.META)
webob_request = webob.Request(django_request.META)
webob_request.body = django_request.body
return webob_request
| Delete unused webob conversion method | Delete unused webob conversion method
| Python | apache-2.0 | nagyistoce/edx-XBlock,edx-solutions/xblock-sdk,Pilou81715/hackathon_edX,EDUlib/XBlock,jamiefolsom/xblock-sdk,nagyistoce/edx-xblock-sdk,edx/xblock-sdk,lovehhf/XBlock,lovehhf/xblock-sdk,mitodl/XBlock,lovehhf/xblock-sdk,lovehhf/xblock-sdk,jamiefolsom/xblock-sdk,edx/XBlock,cpennington/XBlock,nagyistoce/edx-xblock-sdk,edx/XBlock,Pilou81715/hackathon_edX,cpennington/XBlock,Pilou81715/hackathon_edX,4eek/XBlock,edx-solutions/xblock-sdk,edx-solutions/xblock-sdk,lovehhf/xblock-sdk,edx-solutions/xblock-sdk,open-craft/XBlock,nagyistoce/edx-xblock-sdk,edx/xblock-sdk,nagyistoce/edx-XBlock,jamiefolsom/xblock-sdk,Lyla-Fischer/xblock-sdk,dcadams/xblock-sdk,open-craft/XBlock,stvstnfrd/xblock-sdk,edx-solutions/XBlock,stvstnfrd/xblock-sdk,edx/xblock-sdk,cpennington/XBlock,nagyistoce/edx-xblock-sdk,stvstnfrd/xblock-sdk,Pilou81715/hackathon_edX,4eek/XBlock,Lyla-Fischer/xblock-sdk,mitodl/XBlock,Lyla-Fischer/xblock-sdk,lovehhf/XBlock,dcadams/xblock-sdk,edx-solutions/XBlock,dcadams/xblock-sdk,EDUlib/XBlock,jamiefolsom/xblock-sdk | ---
+++
@@ -23,13 +23,3 @@
webob_request = webob.Request(django_request.META)
webob_request.body = django_request.body
return webob_request
-
-
-def requests_to_webob_response(requests):
- """Converts multiple django requests to a WebOb response."""
- response = webob.Response()
- response.status = requests.status_code
- response.body = requests.content
- for hname, hvalue in requests.headers.iteritems():
- response.headers[hname] = hvalue
- return response |
fe3e43e4d894a0d7009dcbfcafb2546d12cb6296 | vumi/middleware/session_length.py | vumi/middleware/session_length.py | # -*- test-case-name: vumi.middleware.tests.test_session_length -*-
import time
from twisted.internet.defer import inlineCallbacks, returnValue
from vumi.message.TransportUserMessage import SESSION_NEW, SESSION_CLOSE
from vumi.middleware.base import BaseMiddleware
from vumi.persist.txredis_manager import TxRedisManager
class SessionLengthMiddleware(BaseMiddleware):
""" Middleware for storing the session length in the message.
Session length is stored if the end of the session is reached.
Configuration option:
:param dict redis:
Redis configuration parameters.
"""
@inlineCallbacks
def setup_middleware(self):
r_config = self.config.get('redis_manager', {})
self.redis = yield TxRedisManager.from_config(r_config)
@inlineCallbacks
def teardown_middleware(self):
yield self.redis.close_manager()
def handle_inbound(self, message, connector_name):
redis_key = '%s:%s' % (message.get('from_addr'), 'session_created')
if message.get('event_type') == SESSION_NEW:
yield self.redis.set(redis_key, str(time.time()))
elif message.get('event_type') == SESSION_CLOSE:
created_time = yield self.redis.get(redis_key)
if created_time:
created_time = float(created_time)
time_diff = time.time() - created_time
message['session_length'] = time_diff
yield self.redis.delete(redis_key)
returnValue(message)
| # -*- test-case-name: vumi.middleware.tests.test_session_length -*-
import time
from twisted.internet.defer import inlineCallbacks, returnValue
from vumi.message import TransportUserMessage
from vumi.middleware.base import BaseMiddleware
from vumi.persist.txredis_manager import TxRedisManager
class SessionLengthMiddleware(BaseMiddleware):
""" Middleware for storing the session length in the message.
Session length is stored if the end of the session is reached.
Configuration option:
:param dict redis:
Redis configuration parameters.
"""
SESSION_NEW, SESSION_CLOSE = (
TransportUserMessage.SESSION_NEW, TransportUserMessage.SESSION_CLOSE)
@inlineCallbacks
def setup_middleware(self):
r_config = self.config.get('redis_manager', {})
self.redis = yield TxRedisManager.from_config(r_config)
@inlineCallbacks
def teardown_middleware(self):
yield self.redis.close_manager()
def handle_inbound(self, message, connector_name):
redis_key = '%s:%s' % (message.get('from_addr'), 'session_created')
if message.get('event_type') == self.SESSION_NEW:
yield self.redis.set(redis_key, str(time.time()))
elif message.get('event_type') == self.SESSION_CLOSE:
created_time = yield self.redis.get(redis_key)
if created_time:
created_time = float(created_time)
time_diff = time.time() - created_time
message['session_length'] = time_diff
yield self.redis.delete(redis_key)
returnValue(message)
| Fix session event variable import. | Fix session event variable import.
| Python | bsd-3-clause | vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,harrissoerja/vumi,harrissoerja/vumi,TouK/vumi | ---
+++
@@ -4,7 +4,7 @@
from twisted.internet.defer import inlineCallbacks, returnValue
-from vumi.message.TransportUserMessage import SESSION_NEW, SESSION_CLOSE
+from vumi.message import TransportUserMessage
from vumi.middleware.base import BaseMiddleware
from vumi.persist.txredis_manager import TxRedisManager
@@ -19,6 +19,9 @@
:param dict redis:
Redis configuration parameters.
"""
+ SESSION_NEW, SESSION_CLOSE = (
+ TransportUserMessage.SESSION_NEW, TransportUserMessage.SESSION_CLOSE)
+
@inlineCallbacks
def setup_middleware(self):
r_config = self.config.get('redis_manager', {})
@@ -30,9 +33,9 @@
def handle_inbound(self, message, connector_name):
redis_key = '%s:%s' % (message.get('from_addr'), 'session_created')
- if message.get('event_type') == SESSION_NEW:
+ if message.get('event_type') == self.SESSION_NEW:
yield self.redis.set(redis_key, str(time.time()))
- elif message.get('event_type') == SESSION_CLOSE:
+ elif message.get('event_type') == self.SESSION_CLOSE:
created_time = yield self.redis.get(redis_key)
if created_time:
created_time = float(created_time) |
a2a1e53d289d39d4df6c6552f89602e96e4775c6 | django_ses/tests/__init__.py | django_ses/tests/__init__.py | from backend import SESBackendTest
from commands import SESCommandTest
from stats import StatParsingTest
from configuration import SettingsImportTest
| from .backend import *
from .commands import *
from .stats import *
from .configuration import *
| Make sure to load *all* tests | Make sure to load *all* tests
| Python | mit | smaato/django-ses,django-ses/django-ses,ticosax/django-ses,piotrbulinski/django-ses-backend,ticosax/django-ses,brutasse/django-ses,grumbler/django-ses,brutasse/django-ses,django-ses/django-ses,grumbler/django-ses,350dotorg/django-ses,smaato/django-ses | ---
+++
@@ -1,5 +1,5 @@
-from backend import SESBackendTest
-from commands import SESCommandTest
-from stats import StatParsingTest
-from configuration import SettingsImportTest
+from .backend import *
+from .commands import *
+from .stats import *
+from .configuration import *
|
1607a12c80b09616f7607e167de8ebb720fb0f3d | demo.py | demo.py |
from __future__ import print_function
import pynbs
my_file = pynbs.read('demo_song.nbs')
print(my_file.header.song_length)
print(my_file.header.description)
print(my_file.notes)
print(my_file.layers)
print(my_file.instruments)
for tick, chord in my_file.song():
print(tick, [note.key for note in chord])
|
from __future__ import print_function
import pynbs
# read file
my_file = pynbs.read('demo_song.nbs')
print(my_file.header.song_length)
print(my_file.header.description)
print(my_file.notes)
print(my_file.layers)
print(my_file.instruments)
for tick, chord in my_file.song():
print(tick, [note.key for note in chord])
# new file
new_file = pynbs.blank_file()
# edit file
new_file.notes = [
pynbs.Note(tick=0, layer=0, instrument=0, key=45),
pynbs.Note(tick=2, layer=0, instrument=0, key=45),
pynbs.Note(tick=4, layer=0, instrument=0, key=45),
pynbs.Note(tick=6, layer=0, instrument=0, key=45),
pynbs.Note(tick=8, layer=0, instrument=0, key=45),
]
new_file.header.song_name = 'foo'
new_file.header.song_author = 'bar'
new_file.header.blocks_added = 9000
new_file.save('new_file.nbs')
| Add examples for editing and saving file | Add examples for editing and saving file
| Python | mit | fizzy81/pynbs | ---
+++
@@ -3,6 +3,8 @@
import pynbs
+
+# read file
my_file = pynbs.read('demo_song.nbs')
@@ -15,3 +17,25 @@
for tick, chord in my_file.song():
print(tick, [note.key for note in chord])
+
+
+# new file
+
+new_file = pynbs.blank_file()
+
+
+# edit file
+
+new_file.notes = [
+ pynbs.Note(tick=0, layer=0, instrument=0, key=45),
+ pynbs.Note(tick=2, layer=0, instrument=0, key=45),
+ pynbs.Note(tick=4, layer=0, instrument=0, key=45),
+ pynbs.Note(tick=6, layer=0, instrument=0, key=45),
+ pynbs.Note(tick=8, layer=0, instrument=0, key=45),
+]
+
+new_file.header.song_name = 'foo'
+new_file.header.song_author = 'bar'
+new_file.header.blocks_added = 9000
+
+new_file.save('new_file.nbs') |
7a7059a28a43e736d963f83b4ea42b08e9200691 | fellowms/tests.py | fellowms/tests.py | from django.test import TestCase
from .models import Event
class EventTestCase(TestCase):
def setUp(self):
events = (
{
"fellow": 1,
"category": "O",
"name": "CW16",
"url": "http://www.software.ac.uk/cw16",
"location": "Edinburgh",
"start_date": "2016-03-18",
"end_date": "2016-03-20",
"budget_request_travel": 100.00,
"budget_request_attendance_fees": 50.00,
"budget_request_subsistence_cost": 50.00,
"budget_request_venue_hire": 0.00,
"budget_request_catering": 0.00,
"budget_request_others": 0.00,
"justification": "Collaborate.",
},
)
for event in events:
Event.objects.create(**event)
| from django.test import TestCase
from .models import Fellow, Event
class FellowTestCase(TestCase):
def setUp(self):
fellows = (
{
"forenames": "A",
"surname": "C",
"affiliation": "King's College",
"research_area": "L391",
"email": "a.c@mail.com",
"phone": "+441111111111",
"gender": "M",
"work_description": "Sociology of science & technology",
"year": "2013",
},
)
for fellow in fellows:
Fellow.objects.create(**fellow)
class EventTestCase(TestCase):
def setUp(self):
events = (
{
"fellow": 1,
"category": "O",
"name": "CW16",
"url": "http://www.software.ac.uk/cw16",
"location": "Edinburgh",
"start_date": "2016-03-18",
"end_date": "2016-03-20",
"budget_request_travel": 100.00,
"budget_request_attendance_fees": 50.00,
"budget_request_subsistence_cost": 50.00,
"budget_request_venue_hire": 0.00,
"budget_request_catering": 0.00,
"budget_request_others": 0.00,
"justification": "Collaborate.",
},
)
for event in events:
Event.objects.create(**event)
| Add test for fellow model | Add test for fellow model
| Python | bsd-3-clause | softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat | ---
+++
@@ -1,5 +1,24 @@
from django.test import TestCase
-from .models import Event
+from .models import Fellow, Event
+
+class FellowTestCase(TestCase):
+ def setUp(self):
+ fellows = (
+ {
+ "forenames": "A",
+ "surname": "C",
+ "affiliation": "King's College",
+ "research_area": "L391",
+ "email": "a.c@mail.com",
+ "phone": "+441111111111",
+ "gender": "M",
+ "work_description": "Sociology of science & technology",
+ "year": "2013",
+ },
+ )
+
+ for fellow in fellows:
+ Fellow.objects.create(**fellow)
class EventTestCase(TestCase):
def setUp(self): |
31ddb8ec0cba2afd4787906c6b6e299df33a8714 | ldapdb/__init__.py | ldapdb/__init__.py | # -*- coding: utf-8 -*-
# This software is distributed under the two-clause BSD license.
# Copyright (c) The django-ldapdb project
from django.conf import settings
import ldap.filter
def escape_ldap_filter(value):
return ldap.filter.escape_filter_chars(str(value))
# Legacy single database support
if hasattr(settings, 'LDAPDB_SERVER_URI'):
from django import db
from ldapdb.router import Router
# Add the LDAP backend
settings.DATABASES['ldap'] = {
'ENGINE': 'ldapdb.backends.ldap',
'NAME': settings.LDAPDB_SERVER_URI,
'USER': settings.LDAPDB_BIND_DN,
'PASSWORD': settings.LDAPDB_BIND_PASSWORD}
# Add the LDAP router
db.router.routers.append(Router())
| # -*- coding: utf-8 -*-
# This software is distributed under the two-clause BSD license.
# Copyright (c) The django-ldapdb project
from django.conf import settings
import sys
import ldap.filter
def escape_ldap_filter(value):
if sys.version_info[0] < 3:
text_value = unicode(value)
else:
text_value = str(value)
return ldap.filter.escape_filter_chars(text_value)
# Legacy single database support
if hasattr(settings, 'LDAPDB_SERVER_URI'):
from django import db
from ldapdb.router import Router
# Add the LDAP backend
settings.DATABASES['ldap'] = {
'ENGINE': 'ldapdb.backends.ldap',
'NAME': settings.LDAPDB_SERVER_URI,
'USER': settings.LDAPDB_BIND_DN,
'PASSWORD': settings.LDAPDB_BIND_PASSWORD}
# Add the LDAP router
db.router.routers.append(Router())
| Fix encoding issue with legacy python (2.7) | Fix encoding issue with legacy python (2.7)
| Python | bsd-2-clause | django-ldapdb/django-ldapdb,jlaine/django-ldapdb | ---
+++
@@ -3,11 +3,17 @@
# Copyright (c) The django-ldapdb project
from django.conf import settings
+import sys
+
import ldap.filter
def escape_ldap_filter(value):
- return ldap.filter.escape_filter_chars(str(value))
+ if sys.version_info[0] < 3:
+ text_value = unicode(value)
+ else:
+ text_value = str(value)
+ return ldap.filter.escape_filter_chars(text_value)
# Legacy single database support
if hasattr(settings, 'LDAPDB_SERVER_URI'): |
b1380edda27c021c67cfd686a30410c15c1f023e | scoring_engine/engine/execute_command.py | scoring_engine/engine/execute_command.py | from scoring_engine.celery_app import celery_app
from billiard.exceptions import SoftTimeLimitExceeded
import subprocess
from scoring_engine.logger import logger
@celery_app.task(name='execute_command', soft_time_limit=30)
def execute_command(job):
output = ""
logger.info("Running cmd for " + str(job))
try:
cmd_result = subprocess.run(
job['command'],
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
output = cmd_result.stdout.decode("utf-8")
job['errored_out'] = False
except SoftTimeLimitExceeded:
job['errored_out'] = True
job['output'] = output
return job
| from scoring_engine.celery_app import celery_app
from billiard.exceptions import SoftTimeLimitExceeded
import subprocess
from scoring_engine.logger import logger
@celery_app.task(name='execute_command', soft_time_limit=30)
def execute_command(job):
output = ""
# Disable duplicate celery log messages
if logger.propagate:
logger.propagate = False
logger.info("Running cmd for " + str(job))
try:
cmd_result = subprocess.run(
job['command'],
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
output = cmd_result.stdout.decode("utf-8")
job['errored_out'] = False
except SoftTimeLimitExceeded:
job['errored_out'] = True
job['output'] = output
return job
| Disable duplicate celery log messages | Disable duplicate celery log messages
| Python | mit | pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine | ---
+++
@@ -8,6 +8,9 @@
@celery_app.task(name='execute_command', soft_time_limit=30)
def execute_command(job):
output = ""
+ # Disable duplicate celery log messages
+ if logger.propagate:
+ logger.propagate = False
logger.info("Running cmd for " + str(job))
try:
cmd_result = subprocess.run( |
66c341ecb6d93758bbccfd65dae6317fce8ed8f5 | jobsboard/jobs/views.py | jobsboard/jobs/views.py | from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView
from django.shortcuts import redirect, render
from django.http import HttpResponse
from .models import Job
from jobsboard.jobs.forms import JobForm
class JobListView(ListView):
model = Job
queryset = Job.objects.all()
template_name = 'job_list.html'
context_object_name = 'jobs'
class JobDetailView(DetailView):
model = Job
context_object_name = 'job'
template_name = 'job_detail.html'
class JobCreateView(CreateView):
model = Job
template_name = 'job_create.html'
form_class = JobForm
def post(self, request, *args, **kwargs):
form = JobForm(request.POST)
if form.is_valid():
job = form.save(commit=False)
job.creator = request.user
job.save()
return HttpResponse("Saved!") | from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView
from django.shortcuts import redirect, render
from django.http import HttpResponse
from .models import Job
from jobsboard.jobs.forms import JobForm
class JobListView(ListView):
model = Job
queryset = Job.objects.all()
template_name = 'job_list.html'
context_object_name = 'jobs'
class JobDetailView(DetailView):
model = Job
context_object_name = 'job'
template_name = 'job_detail.html'
class JobCreateView(CreateView):
model = Job
template_name = 'job_create.html'
form_class = JobForm
def post(self, request, *args, **kwargs):
form = JobForm(request.POST)
if form.is_valid():
job = form.save(commit=False)
job.creator = request.user
job.save()
return HttpResponse("Saved!")
| Add new line at end of file | Add new line at end of file
| Python | mit | pythonph/jobs-board,pythonph/jobs-board,pythonph/jobs-board | |
9216224d96770e32778c46b4959731ac70cb2c88 | london_commute_alert.py | london_commute_alert.py | import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
# We must have this running on PythonAnywhere - Monday to Sunday.
# Ignore Saturday and Sunday
if datetime.date.today().isoweekday() in range(1, 6):
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
| import datetime
import os
import requests
def update():
requests.packages.urllib3.disable_warnings()
resp = requests.get('http://api.tfl.gov.uk/Line/Mode/tube/Status').json()
return {el['id']: el['lineStatuses'][0]['statusSeverityDescription'] for el in resp}
def email(lines):
with open('curl_raw_command.sh') as f:
raw_command = f.read()
if lines:
subject = 'Tube delays for commute'
body = ', '.join(': '.join([line.capitalize(), s]) for line, s in status.items())
else:
subject = 'Good service for commute'
body = 'Good service on all lines'
os.system(raw_command.format(subject=subject, body=body))
def main():
commute_lines = ['metropolitan', 'jubilee', 'central']
status = update()
delays = {c: status[c] for c in commute_lines if status[c] != 'Good Service'}
email(delays)
if __name__ == '__main__':
main()
| Move from python anywhere to webfaction | Move from python anywhere to webfaction
| Python | mit | noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit | ---
+++
@@ -21,10 +21,7 @@
subject = 'Good service for commute'
body = 'Good service on all lines'
- # We must have this running on PythonAnywhere - Monday to Sunday.
- # Ignore Saturday and Sunday
- if datetime.date.today().isoweekday() in range(1, 6):
- os.system(raw_command.format(subject=subject, body=body))
+ os.system(raw_command.format(subject=subject, body=body))
def main(): |
7a59999961b67dbd480c80a4a4f95fa6738b2949 | day-20/solution.py | day-20/solution.py | from __future__ import print_function
def findFirst(data, target):
for idx, value in enumerate(data):
if value >= target:
return idx
return None
target = 34000000
# Target is achieved at itself/10, so reasonable upper bound.
upperbound = target // 10
# Use a varation of Erathostenes' sieve to compute the results
sieve1 = [10] * (upperbound + 1)
sieve2 = [10] * (upperbound + 1)
for x in range(1, upperbound):
for y in range(x, upperbound, x):
sieve1[y] += 10 * x
for y in range(x, min(50 * x, upperbound) + 1, x):
sieve2[y] += 11 * x
print("House", findFirst(sieve1, target))
print("House", findFirst(sieve2, target))
| from __future__ import print_function
def findFirst(data, target):
return next(idx for idx, value in enumerate(data) if value >= target)
target = 34000000
# Target is achieved at itself/10, so reasonable upper bound.
upperbound = target // 10
# Use a varation of Erathostenes' sieve to compute the results
sieve1 = [10] * (upperbound + 1)
sieve2 = [10] * (upperbound + 1)
for x in range(1, upperbound):
for y in range(x, upperbound, x):
sieve1[y] += 10 * x
for y in range(x, min(50 * x, upperbound) + 1, x):
sieve2[y] += 11 * x
print("House", findFirst(sieve1, target))
print("House", findFirst(sieve2, target))
| Improve getting the first valid value. | Improve getting the first valid value.
| Python | mit | bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode | ---
+++
@@ -1,11 +1,7 @@
from __future__ import print_function
def findFirst(data, target):
- for idx, value in enumerate(data):
- if value >= target:
- return idx
-
- return None
+ return next(idx for idx, value in enumerate(data) if value >= target)
target = 34000000
|
61bbfbcdd2b23b24233ea6543cbfc880b6b01ea9 | husk/decorators.py | husk/decorators.py | from argparse import ArgumentParser
def cached_property(func):
cach_attr = '_{}'.format(func.__name__)
@property
def wrap(self):
if not hasattr(self, cach_attr):
value = func(self)
if value is not None:
setattr(self, cach_attr, value)
return getattr(self, cach_attr, None)
return wrap
def cli(*args, **kwargs):
def decorator(func):
class Parser(ArgumentParser):
def handle(self, *args, **kwargs):
try:
func(*args, **kwargs)
except Exception, e:
self.error(e.message)
return Parser(*args, **kwargs)
return decorator
| from argparse import ArgumentParser
def cached_property(func):
cach_attr = '_{}'.format(func.__name__)
@property
def wrap(self):
if not hasattr(self, cach_attr):
value = func(self)
if value is not None:
setattr(self, cach_attr, value)
return getattr(self, cach_attr, None)
return wrap
def cli(*args, **kwargs):
def decorator(func):
class Parser(ArgumentParser):
def handle(self, *args, **kwargs):
try:
func(*args, **kwargs)
except Exception, e:
self.error(e.message)
# No catching of exceptions
def handle_raw(self, *args, **kwargs):
func(*args, **kwargs)
return Parser(*args, **kwargs)
return decorator
| Add `handle_raw` method that does not catch all upstream exceptions | Add `handle_raw` method that does not catch all upstream exceptions | Python | bsd-2-clause | husk/husk | ---
+++
@@ -22,5 +22,10 @@
func(*args, **kwargs)
except Exception, e:
self.error(e.message)
+
+ # No catching of exceptions
+ def handle_raw(self, *args, **kwargs):
+ func(*args, **kwargs)
+
return Parser(*args, **kwargs)
return decorator |
ff31630a582ae2353e18e67cdcfc5495672bd423 | ironic/__init__.py | ironic/__init__.py | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
import eventlet
eventlet.monkey_patch(os=False)
| Move eventlent monkeypatch out of cmd/ | Move eventlent monkeypatch out of cmd/
Completing a #TODO that I left for myself months ago,
now that we are using PBR for our build system,
move the eventlent monkey patching out of ironic/cmd/__init__
and into ironic/__init__
Change-Id: I1fd352919de0a2afe7058ab84c208781ce095dab
| Python | apache-2.0 | faizan-barmawer/elytics,NaohiroTamura/ironic-lib,openstack/ironic-lib,citrix-openstack-build/ironic-lib,faizan-barmawer/ironic-lib | ---
+++
@@ -0,0 +1,22 @@
+# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+os.environ['EVENTLET_NO_GREENDNS'] = 'yes'
+
+import eventlet
+
+eventlet.monkey_patch(os=False) | |
8e2a42369228f3d19b046a610c93de4bec06d5bf | avocado/core/structures.py | avocado/core/structures.py | try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
class ChoicesDict(OrderedDict):
"OrdereDict that yields the key and value on iteration."
def __iter__(self):
iterator = super(ChoicesDict, self).__iter__()
for key in iterator:
yield key, self[key]
| try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
REPR_OUTPUT_SIZE = 20
class ChoicesDict(OrderedDict):
"OrdereDict that yields the key and value on iteration."
def __iter__(self):
iterator = super(ChoicesDict, self).__iter__()
for key in iterator:
yield key, self[key]
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = '...(remaining elements truncated)...'
return repr(tuple(data))
| Add __repr__ to ChoicesDict structure | Add __repr__ to ChoicesDict structure | Python | bsd-2-clause | murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado | ---
+++
@@ -2,6 +2,9 @@
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
+
+
+REPR_OUTPUT_SIZE = 20
class ChoicesDict(OrderedDict):
@@ -11,3 +14,11 @@
for key in iterator:
yield key, self[key]
+
+ def __repr__(self):
+ data = list(self[:REPR_OUTPUT_SIZE + 1])
+
+ if len(data) > REPR_OUTPUT_SIZE:
+ data[-1] = '...(remaining elements truncated)...'
+
+ return repr(tuple(data)) |
9af50ecde67e593533898040e63e6a456fc16da5 | tests/test_style.py | tests/test_style.py | import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
except SystemExit as e:
if e.code != 0:
self.fail('Code style checks failed')
| import logging
import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
logger = logging.getLogger('flake8')
logger.setLevel(logging.ERROR)
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([])
except SystemExit as e:
if e.code != 0:
self.fail('Code style checks failed')
| Decrease noise from code-style test | Decrease noise from code-style test
| Python | mit | ministryofjustice/django-zendesk-tickets,ministryofjustice/django-zendesk-tickets | ---
+++
@@ -1,9 +1,12 @@
+import logging
import pkg_resources
import unittest
class CodeStyleTestCase(unittest.TestCase):
def test_code_style(self):
+ logger = logging.getLogger('flake8')
+ logger.setLevel(logging.ERROR)
flake8 = pkg_resources.load_entry_point('flake8', 'console_scripts', 'flake8')
try:
flake8([]) |
2b9828541066ec4adb09f80fe29468cb0ce2a1e0 | readthedocs/core/management/commands/build_files.py | readthedocs/core/management/commands/build_files.py | import logging
from django.core.management.base import BaseCommand
from django.conf import settings
from projects import tasks
from projects.models import ImportedFile
from builds.models import Version
log = logging.getLogger(__name__)
class Command(BaseCommand):
help = '''\
Delete and re-create ImportedFile objects for all latest Versions, such
that they can be added to the search index. This is accomplished by walking the
filesystem for each project.
'''
def handle(self, *args, **kwargs):
'''
Build/index all versions or a single project's version
'''
# Delete all existing as a cleanup for any deleted projects.
#ImportedFile.objects.all().delete()
if getattr(settings, 'INDEX_ONLY_LATEST', True):
queryset = Version.objects.filter(slug='latst')
else:
queryset = Version.objects.public()
for v in queryset:
log.info("Building files for %s" % v)
try:
tasks.fileify(v)
except Exception:
log.error('Build failed for %s' % v, exc_info=True)
| import logging
from django.core.management.base import BaseCommand
from django.conf import settings
from projects import tasks
from projects.models import ImportedFile
from builds.models import Version
log = logging.getLogger(__name__)
class Command(BaseCommand):
help = '''\
Delete and re-create ImportedFile objects for all latest Versions, such
that they can be added to the search index. This is accomplished by walking the
filesystem for each project.
'''
option_list = BaseCommand.option_list + (
make_option('-p',
action='store_true',
dest='project',
default='',
help='Project to index'
),
)
def handle(self, *args, **kwargs):
'''
Build/index all versions or a single project's version
'''
# Delete all existing as a cleanup for any deleted projects.
#ImportedFile.objects.all().delete()
project = options['project']
if project:
queryset = Version.objects.get(slug=project)
elif getattr(settings, 'INDEX_ONLY_LATEST', True):
queryset = Version.objects.filter(slug='latst')
else:
queryset = Version.objects.public()
for v in queryset:
log.info("Building files for %s" % v)
try:
tasks.fileify(v)
except Exception:
log.error('Build failed for %s' % v, exc_info=True)
| Add ability to index specific project. | Add ability to index specific project.
| Python | mit | SteveViss/readthedocs.org,takluyver/readthedocs.org,VishvajitP/readthedocs.org,davidfischer/readthedocs.org,stevepiercy/readthedocs.org,fujita-shintaro/readthedocs.org,espdev/readthedocs.org,royalwang/readthedocs.org,raven47git/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,pombredanne/readthedocs.org,sunnyzwh/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,GovReady/readthedocs.org,wijerasa/readthedocs.org,michaelmcandrew/readthedocs.org,techtonik/readthedocs.org,gjtorikian/readthedocs.org,sils1297/readthedocs.org,VishvajitP/readthedocs.org,sunnyzwh/readthedocs.org,nikolas/readthedocs.org,mrshoki/readthedocs.org,clarkperkins/readthedocs.org,asampat3090/readthedocs.org,safwanrahman/readthedocs.org,kenshinthebattosai/readthedocs.org,mhils/readthedocs.org,laplaceliu/readthedocs.org,LukasBoersma/readthedocs.org,sid-kap/readthedocs.org,Tazer/readthedocs.org,espdev/readthedocs.org,rtfd/readthedocs.org,GovReady/readthedocs.org,KamranMackey/readthedocs.org,asampat3090/readthedocs.org,singingwolfboy/readthedocs.org,sunnyzwh/readthedocs.org,titiushko/readthedocs.org,kenshinthebattosai/readthedocs.org,espdev/readthedocs.org,soulshake/readthedocs.org,istresearch/readthedocs.org,gjtorikian/readthedocs.org,techtonik/readthedocs.org,atsuyim/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,davidfischer/readthedocs.org,Carreau/readthedocs.org,singingwolfboy/readthedocs.org,Carreau/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,Tazer/readthedocs.org,wanghaven/readthedocs.org,royalwang/readthedocs.org,hach-que/readthedocs.org,singingwolfboy/readthedocs.org,soulshake/readthedocs.org,wanghaven/readthedocs.org,attakei/readthedocs-oauth,kenwang76/readthedocs.org,kdkeyser/readthedocs.org,hach-que/readthedocs.org,atsuyim/readthedocs.org,agjohnson/readthedocs.org,stevepiercy/readthedocs.org,dirn/readthedocs.org,safwanrahman/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,VishvajitP
/readthedocs.org,d0ugal/readthedocs.org,nyergler/pythonslides,kdkeyser/readthedocs.org,mrshoki/readthedocs.org,safwanrahman/readthedocs.org,Tazer/readthedocs.org,kenwang76/readthedocs.org,titiushko/readthedocs.org,takluyver/readthedocs.org,agjohnson/readthedocs.org,GovReady/readthedocs.org,espdev/readthedocs.org,istresearch/readthedocs.org,kenwang76/readthedocs.org,soulshake/readthedocs.org,davidfischer/readthedocs.org,fujita-shintaro/readthedocs.org,laplaceliu/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,titiushko/readthedocs.org,atsuyim/readthedocs.org,techtonik/readthedocs.org,raven47git/readthedocs.org,jerel/readthedocs.org,laplaceliu/readthedocs.org,SteveViss/readthedocs.org,wijerasa/readthedocs.org,emawind84/readthedocs.org,laplaceliu/readthedocs.org,kdkeyser/readthedocs.org,CedarLogic/readthedocs.org,jerel/readthedocs.org,emawind84/readthedocs.org,dirn/readthedocs.org,wanghaven/readthedocs.org,kenwang76/readthedocs.org,cgourlay/readthedocs.org,SteveViss/readthedocs.org,cgourlay/readthedocs.org,d0ugal/readthedocs.org,dirn/readthedocs.org,wanghaven/readthedocs.org,Carreau/readthedocs.org,kenshinthebattosai/readthedocs.org,pombredanne/readthedocs.org,emawind84/readthedocs.org,michaelmcandrew/readthedocs.org,Tazer/readthedocs.org,asampat3090/readthedocs.org,dirn/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,sils1297/readthedocs.org,VishvajitP/readthedocs.org,kdkeyser/readthedocs.org,cgourlay/readthedocs.org,emawind84/readthedocs.org,nikolas/readthedocs.org,agjohnson/readthedocs.org,fujita-shintaro/readthedocs.org,nikolas/readthedocs.org,stevepiercy/readthedocs.org,michaelmcandrew/readthedocs.org,jerel/readthedocs.org,gjtorikian/readthedocs.org,fujita-shintaro/readthedocs.org,CedarLogic/readthedocs.org,mhils/readthedocs.org,CedarLogic/readthedocs.org,rtfd/readthedocs.org,kenshinthebattosai/readthedocs.org,nikolas/readthedocs.org,singingwolfboy/readthedocs.org,asampat3090/readthedocs.org,sils1297/readthedocs.org,LukasBoersma/re
adthedocs.org,clarkperkins/readthedocs.org,tddv/readthedocs.org,nyergler/pythonslides,takluyver/readthedocs.org,mhils/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,SteveViss/readthedocs.org,GovReady/readthedocs.org,gjtorikian/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,cgourlay/readthedocs.org,hach-que/readthedocs.org,KamranMackey/readthedocs.org,mrshoki/readthedocs.org,tddv/readthedocs.org,nyergler/pythonslides,attakei/readthedocs-oauth,tddv/readthedocs.org,atsuyim/readthedocs.org,LukasBoersma/readthedocs.org,raven47git/readthedocs.org,attakei/readthedocs-oauth,takluyver/readthedocs.org,soulshake/readthedocs.org,mhils/readthedocs.org,sunnyzwh/readthedocs.org,istresearch/readthedocs.org,attakei/readthedocs-oauth,raven47git/readthedocs.org,michaelmcandrew/readthedocs.org,istresearch/readthedocs.org,KamranMackey/readthedocs.org,mrshoki/readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,KamranMackey/readthedocs.org,stevepiercy/readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,nyergler/pythonslides,espdev/readthedocs.org,royalwang/readthedocs.org | ---
+++
@@ -18,13 +18,27 @@
filesystem for each project.
'''
+ option_list = BaseCommand.option_list + (
+ make_option('-p',
+ action='store_true',
+ dest='project',
+ default='',
+ help='Project to index'
+ ),
+ )
+
+
def handle(self, *args, **kwargs):
'''
Build/index all versions or a single project's version
'''
# Delete all existing as a cleanup for any deleted projects.
#ImportedFile.objects.all().delete()
- if getattr(settings, 'INDEX_ONLY_LATEST', True):
+ project = options['project']
+
+ if project:
+ queryset = Version.objects.get(slug=project)
+ elif getattr(settings, 'INDEX_ONLY_LATEST', True):
queryset = Version.objects.filter(slug='latst')
else:
queryset = Version.objects.public() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.