commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e86431ab5fbac2c99c0551b8ba261ba09077fc23
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="python-tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
|
Change package name to tempodb
|
Change package name to tempodb
|
Python
|
mit
|
tempodb/tempodb-python,tempodb/tempodb-python,mrgaaron/tempoiq-python,TempoIQ/tempoiq-python
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="python-tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
Change package name to tempodb
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
|
<commit_before>
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="python-tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
<commit_msg>Change package name to tempodb<commit_after>
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="python-tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
Change package name to tempodb#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
|
<commit_before>
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="python-tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
<commit_msg>Change package name to tempodb<commit_after>#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
import tempodb
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="tempodb",
version=tempodb.__version__,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description=read('README'),
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
|
1d58d74e9a19d700d1654c8e9e2858f16535d7cb
|
setup.py
|
setup.py
|
"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=1.0',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=2.3',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
Update minimum babel version to >=2.3.
|
Update minimum babel version to >=2.3.
Babel 1.0 is now 3 years obsolete. Numerous critical bug fixes are
included in the 1.0 - 2.3 range.
|
Python
|
bsd-3-clause
|
mitsuhiko/flask-babel,TkTech/flask-babel,mitsuhiko/flask-babel
|
"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=1.0',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Update minimum babel version to >=2.3.
Babel 1.0 is now 3 years obsolete. Numerous critical bug fixes are
included in the 1.0 - 2.3 range.
|
"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=2.3',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=1.0',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Update minimum babel version to >=2.3.
Babel 1.0 is now 3 years obsolete. Numerous critical bug fixes are
included in the 1.0 - 2.3 range.<commit_after>
|
"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=2.3',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=1.0',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Update minimum babel version to >=2.3.
Babel 1.0 is now 3 years obsolete. Numerous critical bug fixes are
included in the 1.0 - 2.3 range."""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=2.3',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=1.0',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Update minimum babel version to >=2.3.
Babel 1.0 is now 3 years obsolete. Numerous critical bug fixes are
included in the 1.0 - 2.3 range.<commit_after>"""
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.9',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='armin.ronacher@active-4.com',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=2.3',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
efe24b5b9d25bc499de0aff57f7d28f0f3c73991
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0',
'pytest==3.0.3'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
Add pytest to surveymonkey dependencies
|
Add pytest to surveymonkey dependencies
|
Python
|
mit
|
Administrate/surveymonkey
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
Add pytest to surveymonkey dependencies
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0',
'pytest==3.0.3'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
<commit_msg>Add pytest to surveymonkey dependencies<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0',
'pytest==3.0.3'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
Add pytest to surveymonkey dependencies#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0',
'pytest==3.0.3'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
<commit_msg>Add pytest to surveymonkey dependencies<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
requirements = [
'furl==0.5.6',
'six==1.10.0',
'pytest==3.0.3'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='surveymonkey',
version='0.4.2',
description="Python wrapper for the Survey Monkey v3 API",
long_description=readme,
author="Aaron Bassett",
author_email='engineering@getadministrate.com',
url='https://github.com/Administrate/surveymonkey',
packages=[
'surveymonkey',
'surveymonkey.collectors',
'surveymonkey.webhooks',
'surveymonkey.messages'
],
package_dir={'surveymonkey':
'surveymonkey'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='surveymonkey',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)
|
8a76fdf13bd00cd11d8a2bc37111c83c175e6517
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.5'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.6'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
)
|
Declare Python 3 support, bump version.
|
Declare Python 3 support, bump version.
|
Python
|
mit
|
EmilStenstrom/conllu
|
# -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.5'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
],
)
Declare Python 3 support, bump version.
|
# -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.6'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.5'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
],
)
<commit_msg>Declare Python 3 support, bump version.<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.6'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.5'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
],
)
Declare Python 3 support, bump version.# -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.6'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.5'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
],
)
<commit_msg>Declare Python 3 support, bump version.<commit_after># -*- coding: utf-8 -*-
from setuptools import setup
VERSION = '0.6'
setup(
name='conllu',
packages=["conllu"],
version=VERSION,
description='CoNLL-U Parser parses a CoNLL-U formatted string into a nested python dictionary',
author=u'Emil Stenström',
author_email='em@kth.se',
url='https://github.com/EmilStenstrom/conllu/',
download_url='https://github.com/EmilStenstrom/conllu/archive/%s.zip' % VERSION,
install_requires=[],
tests_require=["nose>=1.3.7", "flake8>=3.0.4"],
test_suite="nose.collector",
keywords=['conllu', 'conll', 'conllu-u', 'parser', 'nlp'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
)
|
a63ded95f6de31d0b433966e9369cb04190cb23f
|
setup.py
|
setup.py
|
import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.5',
],
zip_safe=False,
)
|
import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.7',
],
zip_safe=False,
)
|
Mark this version as compatible with redis.py up to 2.6.x
|
Mark this version as compatible with redis.py up to 2.6.x
|
Python
|
bsd-3-clause
|
hellp/saferedisqueue
|
import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.5',
],
zip_safe=False,
)
Mark this version as compatible with redis.py up to 2.6.x
|
import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.7',
],
zip_safe=False,
)
|
<commit_before>import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.5',
],
zip_safe=False,
)
<commit_msg>Mark this version as compatible with redis.py up to 2.6.x<commit_after>
|
import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.7',
],
zip_safe=False,
)
|
import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.5',
],
zip_safe=False,
)
Mark this version as compatible with redis.py up to 2.6.ximport os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.7',
],
zip_safe=False,
)
|
<commit_before>import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.5',
],
zip_safe=False,
)
<commit_msg>Mark this version as compatible with redis.py up to 2.6.x<commit_after>import os
from distutils.core import setup
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
LICENSE = open(os.path.join(here, 'LICENSE')).read()
setup(
name='saferedisqueue',
version='1.1.0',
description='A small wrapper around Redis that provides access to a FIFO queue.',
long_description=README + '\n\n' + CHANGES,
license=LICENSE,
author="Fabian Neumann, ferret go GmbH",
author_email="neumann@ferret-go.com",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Topic :: Internet",
],
keywords='Redis, key-value store, queue, queueing, Storm',
url='https://github.com/hellp/saferedisqueue',
py_modules=['saferedisqueue'],
install_requires=[
'redis >= 2.4.10, < 2.7',
],
zip_safe=False,
)
|
f17db5655cc436aa85c81a6d35555787528075ea
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='nose.collector',
include_package_data=True,
tests_require=['nose'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
|
import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='pytest',
include_package_data=True,
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
|
Switch from nosetest -> pytest
|
Switch from nosetest -> pytest
|
Python
|
mit
|
brentp/geneimpacts
|
import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='nose.collector',
include_package_data=True,
tests_require=['nose'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
Switch from nosetest -> pytest
|
import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='pytest',
include_package_data=True,
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
|
<commit_before>import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='nose.collector',
include_package_data=True,
tests_require=['nose'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
<commit_msg>Switch from nosetest -> pytest<commit_after>
|
import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='pytest',
include_package_data=True,
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
|
import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='nose.collector',
include_package_data=True,
tests_require=['nose'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
Switch from nosetest -> pytestimport os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='pytest',
include_package_data=True,
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
|
<commit_before>import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='nose.collector',
include_package_data=True,
tests_require=['nose'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
<commit_msg>Switch from nosetest -> pytest<commit_after>import os
from setuptools import setup
def get_version():
"""Get the version info from the mpld3 package without importing it"""
import ast
with open(os.path.join("geneimpacts", "__init__.py"), "r") as init_file:
module = ast.parse(init_file.read())
version = (ast.literal_eval(node.value) for node in ast.walk(module)
if isinstance(node, ast.Assign)
and node.targets[0].id == "__version__")
try:
return next(version)
except StopIteration:
raise ValueError("version could not be located")
setup(version=get_version(),
name='geneimpacts',
description="normalize effects from variant annotation tools (snpEff, VEP)",
packages=['geneimpacts', 'geneimpacts.tests'],
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
author="Brent Pedersen",
author_email="bpederse@gmail.com",
zip_safe=False,
test_suite='pytest',
include_package_data=True,
tests_require=['pytest'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Topic :: Scientific/Engineering :: Bio-Informatics'
])
|
0f5224c825c5fbd1aa4d4c89eeb9d35f55e845ee
|
setup.py
|
setup.py
|
#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop'],
packages=['anyutil'],
license='ISC',
)
|
#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop', 'anyhist'],
packages=['anyutil'],
license='ISC',
)
|
Add anyhist as an installed script.
|
Add anyhist as an installed script.
|
Python
|
isc
|
larsyencken/anytop
|
#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop'],
packages=['anyutil'],
license='ISC',
)
Add anyhist as an installed script.
|
#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop', 'anyhist'],
packages=['anyutil'],
license='ISC',
)
|
<commit_before>#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop'],
packages=['anyutil'],
license='ISC',
)
<commit_msg>Add anyhist as an installed script.<commit_after>
|
#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop', 'anyhist'],
packages=['anyutil'],
license='ISC',
)
|
#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop'],
packages=['anyutil'],
license='ISC',
)
Add anyhist as an installed script.#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop', 'anyhist'],
packages=['anyutil'],
license='ISC',
)
|
<commit_before>#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop'],
packages=['anyutil'],
license='ISC',
)
<commit_msg>Add anyhist as an installed script.<commit_after>#-*- coding: utf-8 -*-
#
# setup.py
# anytop
#
# Created by Lars Yencken on 2011-10-09.
# Copyright 2011 Lars Yencken. All rights reserved.
#
from distutils.core import setup
setup(
name='anytop',
version='0.1.2',
description='Streaming frequency distribution viewer.',
long_description=open('README.rst').read(),
author='Lars Yencken',
author_email='lars@yencken.org',
url='http://bitbucket.org/larsyencken/anytop',
scripts=['anytop', 'anyhist'],
packages=['anyutil'],
license='ISC',
)
|
cfcb0ab86e8d8d6cf8fb17f91f5ac49214af6bd9
|
setup.py
|
setup.py
|
import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
|
import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
author='Bertrand Bordage',
author_email='bordage.bertrand@gmail.com',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
|
Set package author to Bertrand Bordage
|
Set package author to Bertrand Bordage
|
Python
|
mit
|
wagtail/wagtail-pg-search-backend
|
import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
Set package author to Bertrand Bordage
|
import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
author='Bertrand Bordage',
author_email='bordage.bertrand@gmail.com',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
|
<commit_before>import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
<commit_msg>Set package author to Bertrand Bordage<commit_after>
|
import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
author='Bertrand Bordage',
author_email='bordage.bertrand@gmail.com',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
|
import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
Set package author to Bertrand Bordageimport io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
author='Bertrand Bordage',
author_email='bordage.bertrand@gmail.com',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
|
<commit_before>import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
<commit_msg>Set package author to Bertrand Bordage<commit_after>import io
from setuptools import find_packages, setup
def read(fname):
with io.open(fname, encoding='utf-8') as f:
return f.read()
setup(
name='wagtail-pg-search-backend',
version='1.0.0.dev0',
author='Bertrand Bordage',
author_email='bordage.bertrand@gmail.com',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
url='https://github.com/leukeleu/wagtail-pg-search-backend',
description='PostgreSQL full text search backend for Wagtail CMS',
long_description=read('README.rst'),
keywords=['wagtail', 'postgres', 'fulltext', 'search'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'License :: OSI Approved :: MIT License'
],
license='MIT',
install_requires=[
'Django>=1.10',
'psycopg2',
'six',
'wagtail'
],
test_suite='runtests.runtests'
)
|
71ca6c8516e5b887a22d2e16f3fdea72f7d041d9
|
STAF/__init__.py
|
STAF/__init__.py
|
# Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'STAFUnmarshalError', 'MapClassDefinition',
'MapClass', 'unmarshal_recursive', 'unmarshal_non_recursive',
'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
|
# Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'unmarshal_force', 'STAFUnmarshalError',
'MapClassDefinition', 'MapClass', 'unmarshal_recursive',
'unmarshal_non_recursive', 'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
|
Add missing function in __all__.
|
Add missing function in __all__.
|
Python
|
epl-1.0
|
KevinGoodsell/caduceus
|
# Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'STAFUnmarshalError', 'MapClassDefinition',
'MapClass', 'unmarshal_recursive', 'unmarshal_non_recursive',
'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
Add missing function in __all__.
|
# Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'unmarshal_force', 'STAFUnmarshalError',
'MapClassDefinition', 'MapClass', 'unmarshal_recursive',
'unmarshal_non_recursive', 'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
|
<commit_before># Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'STAFUnmarshalError', 'MapClassDefinition',
'MapClass', 'unmarshal_recursive', 'unmarshal_non_recursive',
'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
<commit_msg>Add missing function in __all__.<commit_after>
|
# Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'unmarshal_force', 'STAFUnmarshalError',
'MapClassDefinition', 'MapClass', 'unmarshal_recursive',
'unmarshal_non_recursive', 'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
|
# Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'STAFUnmarshalError', 'MapClassDefinition',
'MapClass', 'unmarshal_recursive', 'unmarshal_non_recursive',
'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
Add missing function in __all__.# Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'unmarshal_force', 'STAFUnmarshalError',
'MapClassDefinition', 'MapClass', 'unmarshal_recursive',
'unmarshal_non_recursive', 'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
|
<commit_before># Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'STAFUnmarshalError', 'MapClassDefinition',
'MapClass', 'unmarshal_recursive', 'unmarshal_non_recursive',
'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
<commit_msg>Add missing function in __all__.<commit_after># Copyright 2012 Kevin Goodsell
#
# This software is licensed under the Eclipse Public License (EPL) V1.0.
'''
Interface to the STAF API.
'''
# Using __all__ makes pydoc work properly. Otherwise it looks at the modules the
# items actually come from and assumes they don't belong in the docs for
# __init__.
__all__ = [
'Handle', 'wrap_data', 'add_privacy_delimiters',
'remove_privacy_delimiters', 'mask_private_data',
'escape_privacy_delimiters', 'errors', 'strerror', 'STAFError',
'STAFResultError', 'unmarshal', 'unmarshal_force', 'STAFUnmarshalError',
'MapClassDefinition', 'MapClass', 'unmarshal_recursive',
'unmarshal_non_recursive', 'unmarshal_none',
]
from ._staf import (
Handle,
wrap_data,
add_privacy_delimiters,
remove_privacy_delimiters,
mask_private_data,
escape_privacy_delimiters,
)
from ._errors import (
errors,
strerror,
STAFError,
STAFResultError,
)
from ._marshal import (
unmarshal,
unmarshal_force,
STAFUnmarshalError,
MapClassDefinition,
MapClass,
unmarshal_recursive,
unmarshal_non_recursive,
unmarshal_none,
)
|
bdacb16673b2435249ee7cff42a6cc4cacd07e41
|
setup.py
|
setup.py
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver<2.10', # semver dropped support for legacy Python at 2.10.
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
Add semver version limit due to compat changes
|
Add semver version limit due to compat changes
The semver library stopped working for legacy Python versions after the
2.9.1 release. This adds a less than 2.10 restriction to the abstract
dependency requirements so that folks don't need to all add a pin to
their requirements.txt.
|
Python
|
mit
|
kevinconway/rpmvenv
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
Add semver version limit due to compat changes
The semver library stopped working for legacy Python versions after the
2.9.1 release. This adds a less than 2.10 restriction to the abstract
dependency requirements so that folks don't need to all add a pin to
their requirements.txt.
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver<2.10', # semver dropped support for legacy Python at 2.10.
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
<commit_before>"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
<commit_msg>Add semver version limit due to compat changes
The semver library stopped working for legacy Python versions after the
2.9.1 release. This adds a less than 2.10 restriction to the abstract
dependency requirements so that folks don't need to all add a pin to
their requirements.txt.<commit_after>
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver<2.10', # semver dropped support for legacy Python at 2.10.
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
Add semver version limit due to compat changes
The semver library stopped working for legacy Python versions after the
2.9.1 release. This adds a less than 2.10 restriction to the abstract
dependency requirements so that folks don't need to all add a pin to
their requirements.txt."""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver<2.10', # semver dropped support for legacy Python at 2.10.
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
<commit_before>"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
<commit_msg>Add semver version limit due to compat changes
The semver library stopped working for legacy Python versions after the
2.9.1 release. This adds a less than 2.10 restriction to the abstract
dependency requirements so that folks don't need to all add a pin to
their requirements.txt.<commit_after>"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.23.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver<2.10', # semver dropped support for legacy Python at 2.10.
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
37427ee9052d83cb4bef850487a0e8b7783a64a7
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev15',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpc', 'tensorflow'],
)
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev16',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpcio', 'tensorflow'],
)
|
Change require from grpc to grpcio
|
Change require from grpc to grpcio
|
Python
|
apache-2.0
|
tobiajo/tfyarn
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev15',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpc', 'tensorflow'],
)
Change require from grpc to grpcio
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev16',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpcio', 'tensorflow'],
)
|
<commit_before>from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev15',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpc', 'tensorflow'],
)
<commit_msg>Change require from grpc to grpcio<commit_after>
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev16',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpcio', 'tensorflow'],
)
|
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev15',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpc', 'tensorflow'],
)
Change require from grpc to grpciofrom setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev16',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpcio', 'tensorflow'],
)
|
<commit_before>from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev15',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpc', 'tensorflow'],
)
<commit_msg>Change require from grpc to grpcio<commit_after>from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='yarntf',
version='0.0.1.dev16',
description='TensorFlow on YARN',
long_description=long_description,
url='https://github.com/tobiajo/yarntf',
author='Tobias Johansson',
author_email='tobias@johansson.xyz',
license='Apache License 2.0',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
],
keywords='yarn tf hadoop tensorflow',
packages=find_packages(exclude=['tests']),
install_requires=['grpcio', 'tensorflow'],
)
|
f8d1ac65452339f628345a9663794772348d6193
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
if sys.argv[-1] == 'publish':
publish()
sys.exit()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
|
#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
def make_long_description():
"""
Return the long description for the package.
"""
long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read()
return long_description
if sys.argv[-1] == 'publish':
publish()
sys.exit()
long_description = make_long_description()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=long_description,
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
|
Put the code to create the package's long_description into a function.
|
Put the code to create the package's long_description into a function.
|
Python
|
mit
|
harsh00008/pystache,rismalrv/pystache,arlenesr28/pystache,nitish116/pystache,rismalrv/pystache,harsh00008/pystache,nitish116/pystache,charbeljc/pystache,rismalrv/pystache,nitish116/pystache,arlenesr28/pystache,beni55/pystache,jrnold/pystache,charbeljc/pystache,arlenesr28/pystache,beni55/pystache,jrnold/pystache,harsh00008/pystache,defunkt/pystache
|
#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
if sys.argv[-1] == 'publish':
publish()
sys.exit()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
Put the code to create the package's long_description into a function.
|
#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
def make_long_description():
"""
Return the long description for the package.
"""
long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read()
return long_description
if sys.argv[-1] == 'publish':
publish()
sys.exit()
long_description = make_long_description()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=long_description,
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
if sys.argv[-1] == 'publish':
publish()
sys.exit()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
<commit_msg>Put the code to create the package's long_description into a function.<commit_after>
|
#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
def make_long_description():
"""
Return the long description for the package.
"""
long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read()
return long_description
if sys.argv[-1] == 'publish':
publish()
sys.exit()
long_description = make_long_description()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=long_description,
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
|
#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
if sys.argv[-1] == 'publish':
publish()
sys.exit()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
Put the code to create the package's long_description into a function.#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
def make_long_description():
"""
Return the long description for the package.
"""
long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read()
return long_description
if sys.argv[-1] == 'publish':
publish()
sys.exit()
long_description = make_long_description()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=long_description,
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
if sys.argv[-1] == 'publish':
publish()
sys.exit()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
<commit_msg>Put the code to create the package's long_description into a function.<commit_after>#!/usr/bin/env python
# coding: utf-8
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def publish():
"""
Publish this package to PyPI (aka "the Cheeseshop").
"""
os.system('python setup.py sdist upload')
def make_long_description():
"""
Return the long description for the package.
"""
long_description = open('README.rst').read() + '\n\n' + open('HISTORY.rst').read()
return long_description
if sys.argv[-1] == 'publish':
publish()
sys.exit()
long_description = make_long_description()
setup(name='pystache',
version='0.3.1',
description='Mustache for Python',
long_description=long_description,
author='Chris Wanstrath',
author_email='chris@ozmm.org',
url='http://github.com/defunkt/pystache',
packages=['pystache'],
license='MIT',
entry_points = {
'console_scripts': ['pystache=pystache.commands:main'],
},
classifiers = (
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
)
)
|
bbc1d23f61086c63bc57215f3202050440f7e2a0
|
setup.py
|
setup.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
]
)
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
],
dependency_links=[
'https://github.com/behdad/fonttools/tarball/master#egg=fonttools-3.0'
]
)
|
Add dependency on github version of fonttools
|
Add dependency on github version of fonttools
|
Python
|
apache-2.0
|
googlefonts/fontreport,googlei18n/fontreport
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
]
)
Add dependency on github version of fonttools
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
],
dependency_links=[
'https://github.com/behdad/fonttools/tarball/master#egg=fonttools-3.0'
]
)
|
<commit_before># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
]
)
<commit_msg>Add dependency on github version of fonttools<commit_after>
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
],
dependency_links=[
'https://github.com/behdad/fonttools/tarball/master#egg=fonttools-3.0'
]
)
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
]
)
Add dependency on github version of fonttools# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
],
dependency_links=[
'https://github.com/behdad/fonttools/tarball/master#egg=fonttools-3.0'
]
)
|
<commit_before># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
]
)
<commit_msg>Add dependency on github version of fonttools<commit_after># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='fontreport',
version='1.0',
packages=['fontreport'],
package_data = {'fontreport': ['*.json']},
entry_points = {
'console_scripts': [
'fontreport = fontreport.fontreport:main'
],
},
install_requires=[
'fonttools>=3.0',
],
dependency_links=[
'https://github.com/behdad/fonttools/tarball/master#egg=fonttools-3.0'
]
)
|
a50e899e568080e99926acb7c9e9dce8e825b9e5
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
"six",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
|
Add six as a requirement.
|
Add six as a requirement.
|
Python
|
agpl-3.0
|
edx/edx-milestones
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
Add six as a requirement.
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
"six",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
|
<commit_before>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
<commit_msg>Add six as a requirement.<commit_after>
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
"six",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
Add six as a requirement.#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
"six",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
|
<commit_before>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
<commit_msg>Add six as a requirement.<commit_after>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
from setuptools import setup, find_packages
from milestones import __version__ as VERSION
setup(
name='edx-milestones',
version=VERSION,
description='Significant events module for Open edX',
long_description=open('README.md').read(),
author='edX',
url='https://github.com/edx/edx-milestones',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
packages=find_packages(exclude=["tests"]),
install_requires=[
"django>=1.8,<2.0",
"django-model-utils",
"edx-opaque-keys>=0.2.1,<1.0.0",
"six",
],
tests_require=[
"coverage==4.5.3",
"nose==1.3.3",
"httpretty==0.8.0",
"pep8==1.5.7",
"pylint==1.2.1",
"pep257==0.3.2"
]
)
|
5b1a353e77f20c8516b20f4ca85d3fe201fb3544
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'formal': ['formal.css', 'html/*', 'js/*'],
},
zip_safe = True,
)
|
from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'': ['*.css', '*.html', '*.js'],
},
zip_safe = True,
)
|
Make the package_data globs less specific to pick up examples.css too and (hopefully) not miss resource files added in future.
|
Make the package_data globs less specific to pick up examples.css too and
(hopefully) not miss resource files added in future.
|
Python
|
mit
|
emgee/formal,emgee/formal,emgee/formal
|
from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'formal': ['formal.css', 'html/*', 'js/*'],
},
zip_safe = True,
)
Make the package_data globs less specific to pick up examples.css too and
(hopefully) not miss resource files added in future.
|
from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'': ['*.css', '*.html', '*.js'],
},
zip_safe = True,
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'formal': ['formal.css', 'html/*', 'js/*'],
},
zip_safe = True,
)
<commit_msg>Make the package_data globs less specific to pick up examples.css too and
(hopefully) not miss resource files added in future.<commit_after>
|
from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'': ['*.css', '*.html', '*.js'],
},
zip_safe = True,
)
|
from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'formal': ['formal.css', 'html/*', 'js/*'],
},
zip_safe = True,
)
Make the package_data globs less specific to pick up examples.css too and
(hopefully) not miss resource files added in future.from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'': ['*.css', '*.html', '*.js'],
},
zip_safe = True,
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'formal': ['formal.css', 'html/*', 'js/*'],
},
zip_safe = True,
)
<commit_msg>Make the package_data globs less specific to pick up examples.css too and
(hopefully) not miss resource files added in future.<commit_after>from setuptools import setup, find_packages
setup(
name='formal',
version='0.8.2',
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=find_packages(),
package_data={
'': ['*.css', '*.html', '*.js'],
},
zip_safe = True,
)
|
e3c9adf810ac97f8780dbed03fb190d388e11926
|
setup.py
|
setup.py
|
import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
|
import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff', 'flake8'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
|
Add flake8 to installation dependencies
|
Add flake8 to installation dependencies
|
Python
|
mit
|
willthames/ansible-review
|
import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
Add flake8 to installation dependencies
|
import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff', 'flake8'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
|
<commit_before>import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
<commit_msg>Add flake8 to installation dependencies<commit_after>
|
import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff', 'flake8'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
|
import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
Add flake8 to installation dependenciesimport os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff', 'flake8'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
|
<commit_before>import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
<commit_msg>Add flake8 to installation dependencies<commit_after>import os
from setuptools import find_packages
from setuptools import setup
import sys
sys.path.insert(0, os.path.abspath('lib'))
exec(open('lib/ansiblereview/version.py').read())
setup(
name='ansible-review',
version=__version__,
description=('reviews ansible playbooks, roles and inventory and suggests improvements'),
keywords='ansible, code review',
author='Will Thames',
author_email='will@thames.id.au',
url='https://github.com/willthames/ansible-review',
package_dir={'': 'lib'},
packages=find_packages('lib'),
zip_safe=False,
install_requires=['ansible-lint>=3.0.0', 'pyyaml', 'appdirs', 'unidiff', 'flake8'],
scripts=['bin/ansible-review'],
classifiers=[
'License :: OSI Approved :: MIT License',
],
test_suite="test"
)
|
b53e53e6098af55254977b9b98af277de8304fbe
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=[
'Click',
'Jinja2'
],
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
|
import sys
from setuptools import setup
requirements = ['Click', 'Jinja2']
if 'win32' in sys.platform.lower():
# Windows needs colorama for the terminal colors to work.
requirements.append('colorama')
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=requirements,
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
|
Add colorama to requirements for Windows
|
Add colorama to requirements for Windows
|
Python
|
mit
|
Encrylize/EasyEuler
|
from setuptools import setup
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=[
'Click',
'Jinja2'
],
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
Add colorama to requirements for Windows
|
import sys
from setuptools import setup
requirements = ['Click', 'Jinja2']
if 'win32' in sys.platform.lower():
# Windows needs colorama for the terminal colors to work.
requirements.append('colorama')
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=requirements,
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
|
<commit_before>from setuptools import setup
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=[
'Click',
'Jinja2'
],
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
<commit_msg>Add colorama to requirements for Windows<commit_after>
|
import sys
from setuptools import setup
requirements = ['Click', 'Jinja2']
if 'win32' in sys.platform.lower():
# Windows needs colorama for the terminal colors to work.
requirements.append('colorama')
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=requirements,
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
|
from setuptools import setup
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=[
'Click',
'Jinja2'
],
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
Add colorama to requirements for Windowsimport sys
from setuptools import setup
requirements = ['Click', 'Jinja2']
if 'win32' in sys.platform.lower():
# Windows needs colorama for the terminal colors to work.
requirements.append('colorama')
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=requirements,
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
|
<commit_before>from setuptools import setup
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=[
'Click',
'Jinja2'
],
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
<commit_msg>Add colorama to requirements for Windows<commit_after>import sys
from setuptools import setup
requirements = ['Click', 'Jinja2']
if 'win32' in sys.platform.lower():
# Windows needs colorama for the terminal colors to work.
requirements.append('colorama')
setup(
name='EasyEuler',
version='0.1',
py_modules=['EasyEuler'],
install_requires=requirements,
entry_points='''
[console_scripts]
easyeuler=EasyEuler.cli:cli
'''
)
|
7fb5df76dc3b0e044c2ae6fe5f860a9edad76f83
|
backend/breach/tests/test_sniffer.py
|
backend/breach/tests/test_sniffer.py
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_stop(self, requests):
self.sniffer.stop(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
|
Fix sniffer test, rename stop to delete
|
Fix sniffer test, rename stop to delete
|
Python
|
mit
|
dimkarakostas/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dionyziz/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,dimkarakostas/rupture,esarafianou/rupture,dimriou/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,esarafianou/rupture,dimkarakostas/rupture,esarafianou/rupture,esarafianou/rupture,dionyziz/rupture
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_stop(self, requests):
self.sniffer.stop(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
Fix sniffer test, rename stop to delete
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
|
<commit_before>from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_stop(self, requests):
self.sniffer.stop(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
<commit_msg>Fix sniffer test, rename stop to delete<commit_after>
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
|
from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_stop(self, requests):
self.sniffer.stop(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
Fix sniffer test, rename stop to deletefrom mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
|
<commit_before>from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_stop(self, requests):
self.sniffer.stop(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
<commit_msg>Fix sniffer test, rename stop to delete<commit_after>from mock import patch
from django.test import TestCase
from breach.sniffer import Sniffer
class SnifferTest(TestCase):
def setUp(self):
self.endpoint = 'http://localhost'
self.sniffer = Sniffer(self.endpoint)
self.source_ip = '147.102.239.229'
self.destination_host = 'dionyziz.com'
@patch('breach.sniffer.requests')
def test_sniffer_start(self, requests):
self.sniffer.start(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
@patch('breach.sniffer.requests')
def test_sniffer_read(self, requests):
self.sniffer.read(self.source_ip, self.destination_host)
self.assertTrue(requests.get.called)
@patch('breach.sniffer.requests')
def test_sniffer_delete(self, requests):
self.sniffer.delete(self.source_ip, self.destination_host)
self.assertTrue(requests.post.called)
|
6ccc4a267f939d60ab8948874d9b066ff2b2e5ee
|
grader/grader/test/test_build.py
|
grader/grader/test/test_build.py
|
import os
import pytest
from subprocess import Popen, PIPE
def has_installed(program):
"""Checks to see if a program is installed using ``which``.
:param str program: the name of the program we're looking for
:rtype bool:
:return: True if it's installed, otherwise False.
"""
proc = Popen(["which", program], stdout=PIPE, stderr=PIPE)
exit_code = proc.wait()
return exit_code == 0
hasdocker = pytest.mark.skipif(not has_installed("docker"),
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
|
import os
import pytest
import shutil
hasdocker = pytest.mark.skipif(shutil.which("docker") is None,
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
|
Use shutil for 'which docker'
|
Use shutil for 'which docker'
|
Python
|
mit
|
redkyn/grader,redkyn/grader,grade-it/grader
|
import os
import pytest
from subprocess import Popen, PIPE
def has_installed(program):
"""Checks to see if a program is installed using ``which``.
:param str program: the name of the program we're looking for
:rtype bool:
:return: True if it's installed, otherwise False.
"""
proc = Popen(["which", program], stdout=PIPE, stderr=PIPE)
exit_code = proc.wait()
return exit_code == 0
hasdocker = pytest.mark.skipif(not has_installed("docker"),
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
Use shutil for 'which docker'
|
import os
import pytest
import shutil
hasdocker = pytest.mark.skipif(shutil.which("docker") is None,
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
|
<commit_before>import os
import pytest
from subprocess import Popen, PIPE
def has_installed(program):
"""Checks to see if a program is installed using ``which``.
:param str program: the name of the program we're looking for
:rtype bool:
:return: True if it's installed, otherwise False.
"""
proc = Popen(["which", program], stdout=PIPE, stderr=PIPE)
exit_code = proc.wait()
return exit_code == 0
hasdocker = pytest.mark.skipif(not has_installed("docker"),
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
<commit_msg>Use shutil for 'which docker'<commit_after>
|
import os
import pytest
import shutil
hasdocker = pytest.mark.skipif(shutil.which("docker") is None,
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
|
import os
import pytest
from subprocess import Popen, PIPE
def has_installed(program):
"""Checks to see if a program is installed using ``which``.
:param str program: the name of the program we're looking for
:rtype bool:
:return: True if it's installed, otherwise False.
"""
proc = Popen(["which", program], stdout=PIPE, stderr=PIPE)
exit_code = proc.wait()
return exit_code == 0
hasdocker = pytest.mark.skipif(not has_installed("docker"),
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
Use shutil for 'which docker'import os
import pytest
import shutil
hasdocker = pytest.mark.skipif(shutil.which("docker") is None,
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
|
<commit_before>import os
import pytest
from subprocess import Popen, PIPE
def has_installed(program):
"""Checks to see if a program is installed using ``which``.
:param str program: the name of the program we're looking for
:rtype bool:
:return: True if it's installed, otherwise False.
"""
proc = Popen(["which", program], stdout=PIPE, stderr=PIPE)
exit_code = proc.wait()
return exit_code == 0
hasdocker = pytest.mark.skipif(not has_installed("docker"),
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
<commit_msg>Use shutil for 'which docker'<commit_after>import os
import pytest
import shutil
hasdocker = pytest.mark.skipif(shutil.which("docker") is None,
reason="Docker must be installed.")
"""A decorator to skip a test if docker is not installed."""
@hasdocker
def test_build(parse_and_run):
"""Test vanilla assignment build
"""
path = parse_and_run(["init", "cpl"])
parse_and_run(["new", "a1"])
dockerfile_path = os.path.join(path, "assignments", "a1",
"gradesheet", "Dockerfile")
with open(dockerfile_path, 'w') as dockerfile:
dockerfile.write("FROM ubuntu:12.04")
parse_and_run(["build", "a1"])
|
de43482266fa71adb8393823680675145ffe93e0
|
hr_switzerland/models/hr_expense.py
|
hr_switzerland/models/hr_expense.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
class HrExpenseSheet(models.Model):
_inherit = 'hr.expense.sheet'
# Adding a user_id field for the assign notification to work
user_id = fields.Many2one(related='employee_id.user_id')
@api.model
def create(self, vals):
"""Notify managers when expense is created."""
sheet = super(HrExpenseSheet, self).create(vals)
users = sheet._get_users_to_subscribe() - self.env.user
sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids)
return sheet
def _add_followers(self):
"""Notify managers when employee is changed."""
super(HrExpenseSheet, self)._add_followers()
users = self._get_users_to_subscribe() - self.env.user
self._message_auto_subscribe_notify(users.mapped('partner_id').ids)
|
Add notification to manager at expense creation
|
Add notification to manager at expense creation
|
Python
|
agpl-3.0
|
CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
Add notification to manager at expense creation
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
class HrExpenseSheet(models.Model):
_inherit = 'hr.expense.sheet'
# Adding a user_id field for the assign notification to work
user_id = fields.Many2one(related='employee_id.user_id')
@api.model
def create(self, vals):
"""Notify managers when expense is created."""
sheet = super(HrExpenseSheet, self).create(vals)
users = sheet._get_users_to_subscribe() - self.env.user
sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids)
return sheet
def _add_followers(self):
"""Notify managers when employee is changed."""
super(HrExpenseSheet, self)._add_followers()
users = self._get_users_to_subscribe() - self.env.user
self._message_auto_subscribe_notify(users.mapped('partner_id').ids)
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
<commit_msg>Add notification to manager at expense creation<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
class HrExpenseSheet(models.Model):
_inherit = 'hr.expense.sheet'
# Adding a user_id field for the assign notification to work
user_id = fields.Many2one(related='employee_id.user_id')
@api.model
def create(self, vals):
"""Notify managers when expense is created."""
sheet = super(HrExpenseSheet, self).create(vals)
users = sheet._get_users_to_subscribe() - self.env.user
sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids)
return sheet
def _add_followers(self):
"""Notify managers when employee is changed."""
super(HrExpenseSheet, self)._add_followers()
users = self._get_users_to_subscribe() - self.env.user
self._message_auto_subscribe_notify(users.mapped('partner_id').ids)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
Add notification to manager at expense creation# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
class HrExpenseSheet(models.Model):
_inherit = 'hr.expense.sheet'
# Adding a user_id field for the assign notification to work
user_id = fields.Many2one(related='employee_id.user_id')
@api.model
def create(self, vals):
"""Notify managers when expense is created."""
sheet = super(HrExpenseSheet, self).create(vals)
users = sheet._get_users_to_subscribe() - self.env.user
sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids)
return sheet
def _add_followers(self):
"""Notify managers when employee is changed."""
super(HrExpenseSheet, self)._add_followers()
users = self._get_users_to_subscribe() - self.env.user
self._message_auto_subscribe_notify(users.mapped('partner_id').ids)
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
<commit_msg>Add notification to manager at expense creation<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models, fields
class HrExpense(models.Model):
_inherit = "hr.expense"
# Make product editable when expense is submitted
product_id = fields.Many2one(
states={
'draft': [('readonly', False)],
'submit': [('readonly', False)]
}
)
@api.onchange('product_id')
def _onchange_product_id(self):
"""
Prevent changing amounts if expense is submitted.
"""
if self.state == 'draft':
super(HrExpense, self)._onchange_product_id()
class HrExpenseSheet(models.Model):
_inherit = 'hr.expense.sheet'
# Adding a user_id field for the assign notification to work
user_id = fields.Many2one(related='employee_id.user_id')
@api.model
def create(self, vals):
"""Notify managers when expense is created."""
sheet = super(HrExpenseSheet, self).create(vals)
users = sheet._get_users_to_subscribe() - self.env.user
sheet._message_auto_subscribe_notify(users.mapped('partner_id').ids)
return sheet
def _add_followers(self):
"""Notify managers when employee is changed."""
super(HrExpenseSheet, self)._add_followers()
users = self._get_users_to_subscribe() - self.env.user
self._message_auto_subscribe_notify(users.mapped('partner_id').ids)
|
9830a8d3cf140af5af53918db51ede4b82392dd5
|
dbcollection/datasets/mscoco/load_data_test.py
|
dbcollection/datasets/mscoco/load_data_test.py
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['id']] = {
"width" : annot['width'],
"height" : annot['height'],
"filename" : os.path.join(image_dir, annot['file_name'])
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [data, category_list, supercategory_list]}
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['file_name']] = {
"file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
"id" : annot['id'],
"coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory'],
"id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
|
Add annotations var to returned data
|
db: Add annotations var to returned data
|
Python
|
mit
|
dbcollection/dbcollection,farrajota/dbcollection
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['id']] = {
"width" : annot['width'],
"height" : annot['height'],
"filename" : os.path.join(image_dir, annot['file_name'])
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [data, category_list, supercategory_list]}db: Add annotations var to returned data
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['file_name']] = {
"file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
"id" : annot['id'],
"coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory'],
"id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
|
<commit_before>import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['id']] = {
"width" : annot['width'],
"height" : annot['height'],
"filename" : os.path.join(image_dir, annot['file_name'])
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [data, category_list, supercategory_list]}<commit_msg>db: Add annotations var to returned data<commit_after>
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['file_name']] = {
"file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
"id" : annot['id'],
"coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory'],
"id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
|
import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['id']] = {
"width" : annot['width'],
"height" : annot['height'],
"filename" : os.path.join(image_dir, annot['file_name'])
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [data, category_list, supercategory_list]}db: Add annotations var to returned dataimport os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['file_name']] = {
"file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
"id" : annot['id'],
"coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory'],
"id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
|
<commit_before>import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['id']] = {
"width" : annot['width'],
"height" : annot['height'],
"filename" : os.path.join(image_dir, annot['file_name'])
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [data, category_list, supercategory_list]}<commit_msg>db: Add annotations var to returned data<commit_after>import os
from dbcollection.utils.file_load import load_json
def load_data_test(set_name, image_dir, annotation_path, verbose=True):
"""
Load test data annotations.
"""
data = {}
# load annotation file
if verbose:
print('> Loading annotation file: ' + annotation_path)
annotations = load_json(annotation_path)
# parse annotations
# images
if verbose:
print('> Processing image annotations... ')
for i, annot in enumerate(annotations['images']):
data[annot['file_name']] = {
"file_name" : os.path.join(image_dir, annot['file_name']),
"width" : annot['width'],
"height" : annot['height'],
"id" : annot['id'],
"coco_url" : annot['coco_url'],
}
# categories
if verbose:
print('> Processing category annotations... ')
categories = {}
category_list, supercategory_list = [], []
for i, annot in enumerate(annotations['categories']):
categories[annot['id']] = {
"name" : annot['name'],
"supercategory" : annot['supercategory'],
"id" : annot['id']
}
category_list.append(annot['name'])
supercategory_list.append(annot['supercategory'])
supercategory_list = list(set(supercategory_list))
return {set_name : [sorted(data), annotations, category_list, supercategory_list]}
|
867c71f0f2d3c2898815334a5d76063cd7671fae
|
processors/fix_changeline_budget_titles.py
|
processors/fix_changeline_budget_titles.py
|
import json
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
title = budgets.get("%(year)s/%(budget_code)s" % line)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
|
import json
import logging
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
key = "%(year)s/%(budget_code)s" % line
title = budgets.get(key)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
|
Fix bug in changeling title fix - it used to remove some lines on the way...
|
Fix bug in changeling title fix - it used to remove some lines on the way...
|
Python
|
mit
|
OpenBudget/open-budget-data,OpenBudget/open-budget-data,omerbartal/open-budget-data,omerbartal/open-budget-data
|
import json
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
title = budgets.get("%(year)s/%(budget_code)s" % line)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
Fix bug in changeling title fix - it used to remove some lines on the way...
|
import json
import logging
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
key = "%(year)s/%(budget_code)s" % line
title = budgets.get(key)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
|
<commit_before>import json
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
title = budgets.get("%(year)s/%(budget_code)s" % line)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
<commit_msg>Fix bug in changeling title fix - it used to remove some lines on the way...<commit_after>
|
import json
import logging
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
key = "%(year)s/%(budget_code)s" % line
title = budgets.get(key)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
|
import json
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
title = budgets.get("%(year)s/%(budget_code)s" % line)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
Fix bug in changeling title fix - it used to remove some lines on the way...import json
import logging
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
key = "%(year)s/%(budget_code)s" % line
title = budgets.get(key)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
|
<commit_before>import json
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
title = budgets.get("%(year)s/%(budget_code)s" % line)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
<commit_msg>Fix bug in changeling title fix - it used to remove some lines on the way...<commit_after>import json
import logging
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_changeline_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
changes_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(changes_jsons):
line = json.loads(line.strip())
key = "%(year)s/%(budget_code)s" % line
title = budgets.get(key)
if title != None and title != line['budget_title']:
line['budget_title'] = title
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
outfile.write(json.dumps(line,sort_keys=True)+"\n")
print "updated %d entries" % changed_num
|
c988925927ec9d50ded81c92b85c3abce6c2638f
|
fireplace/carddata/minions/neutral/legendary.py
|
fireplace/carddata/minions/neutral/legendary.py
|
import random
from ...card import *
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
|
import random
from ...card import *
# Cairne Bloodhoof
class EX1_110(Card):
deathrattle = summonMinion("EX1_110t")
# Baron Geddon
class EX1_249(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
if target is not self:
self.hit(target, 2)
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Malygos
class EX1_563(Card):
spellpower = 5
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
|
Implement Baron Geddon, Cairne Bloodhoof and Malygos
|
Implement Baron Geddon, Cairne Bloodhoof and Malygos
|
Python
|
agpl-3.0
|
amw2104/fireplace,beheh/fireplace,smallnamespace/fireplace,liujimj/fireplace,butozerca/fireplace,oftc-ftw/fireplace,Ragowit/fireplace,Ragowit/fireplace,smallnamespace/fireplace,NightKev/fireplace,Meerkov/fireplace,jleclanche/fireplace,butozerca/fireplace,Meerkov/fireplace,amw2104/fireplace,oftc-ftw/fireplace,liujimj/fireplace
|
import random
from ...card import *
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
Implement Baron Geddon, Cairne Bloodhoof and Malygos
|
import random
from ...card import *
# Cairne Bloodhoof
class EX1_110(Card):
deathrattle = summonMinion("EX1_110t")
# Baron Geddon
class EX1_249(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
if target is not self:
self.hit(target, 2)
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Malygos
class EX1_563(Card):
spellpower = 5
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
|
<commit_before>import random
from ...card import *
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
<commit_msg>Implement Baron Geddon, Cairne Bloodhoof and Malygos<commit_after>
|
import random
from ...card import *
# Cairne Bloodhoof
class EX1_110(Card):
deathrattle = summonMinion("EX1_110t")
# Baron Geddon
class EX1_249(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
if target is not self:
self.hit(target, 2)
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Malygos
class EX1_563(Card):
spellpower = 5
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
|
import random
from ...card import *
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
Implement Baron Geddon, Cairne Bloodhoof and Malygosimport random
from ...card import *
# Cairne Bloodhoof
class EX1_110(Card):
deathrattle = summonMinion("EX1_110t")
# Baron Geddon
class EX1_249(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
if target is not self:
self.hit(target, 2)
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Malygos
class EX1_563(Card):
spellpower = 5
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
|
<commit_before>import random
from ...card import *
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
<commit_msg>Implement Baron Geddon, Cairne Bloodhoof and Malygos<commit_after>import random
from ...card import *
# Cairne Bloodhoof
class EX1_110(Card):
deathrattle = summonMinion("EX1_110t")
# Baron Geddon
class EX1_249(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
if target is not self:
self.hit(target, 2)
# Ragnaros the Firelord
class EX1_298(Card):
cantAttack = True
def onTurnEnd(self, player):
self.hit(random.choice(self.controller.getTargets(TARGET_ENEMY_CHARACTERS)), 8)
# Harrison Jones
class EX1_558(Card):
def action(self):
weapon = self.controller.opponent.hero.weapon
if weapon:
weapon.destroy()
self.controller.draw(weapon.durability)
# Malygos
class EX1_563(Card):
spellpower = 5
# Deathwing
class NEW1_030(Card):
def action(self):
for target in self.controller.getTargets(TARGET_ALL_MINIONS):
# Let's not kill ourselves in the process
if target is not self:
target.destroy()
self.controller.discardHand()
|
679c2daceb7f4e9d193e345ee42b0334dd576c64
|
changes/web/index.py
|
changes/web/index.py
|
import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN']:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
|
import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN'] and False:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
|
Disable Sentry due to sync behavior
|
Disable Sentry due to sync behavior
|
Python
|
apache-2.0
|
bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes
|
import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN']:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
Disable Sentry due to sync behavior
|
import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN'] and False:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
|
<commit_before>import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN']:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
<commit_msg>Disable Sentry due to sync behavior<commit_after>
|
import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN'] and False:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
|
import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN']:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
Disable Sentry due to sync behaviorimport changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN'] and False:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
|
<commit_before>import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN']:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
<commit_msg>Disable Sentry due to sync behavior<commit_after>import changes
import urlparse
from flask import render_template, current_app, redirect, url_for, session
from flask.views import MethodView
class IndexView(MethodView):
def get(self, path=''):
# require auth
if not session.get('email'):
return redirect(url_for('login'))
if current_app.config['SENTRY_DSN'] and False:
parsed = urlparse.urlparse(current_app.config['SENTRY_DSN'])
dsn = '%s://%s@%s/%s' % (
parsed.scheme,
parsed.username,
parsed.hostname + (':%s' % (parsed.port,) if parsed.port else ''),
parsed.path,
)
else:
dsn = None
return render_template('index.html', **{
'SENTRY_PUBLIC_DSN': dsn,
'VERSION': changes.get_version(),
})
|
1a0c9fd8e8d6ce59c2d6ea42c59dfa6497400753
|
buildscripts/condarecipe/run_test.py
|
buildscripts/condarecipe/run_test.py
|
import sys
import platform
import llvm
from llvm.core import Module
from llvm.ee import EngineBuilder
from llvm.utils import check_intrinsics
m = Module.new('fjoidajfa')
eb = EngineBuilder.new(m)
target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
#check_intrinsics.main()
print('llvm.__version__: %s' % llvm.__version__)
#assert llvm.__version__ == '0.12.0'
|
import sys
import platform
import llvm
from llvm.ee import TargetMachine
target = TargetMachine.new()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
print('llvm.__version__: %s' % llvm.__version__)
|
Fix buildscript for Python2 on OSX
|
Fix buildscript for Python2 on OSX
|
Python
|
bsd-3-clause
|
llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy
|
import sys
import platform
import llvm
from llvm.core import Module
from llvm.ee import EngineBuilder
from llvm.utils import check_intrinsics
m = Module.new('fjoidajfa')
eb = EngineBuilder.new(m)
target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
#check_intrinsics.main()
print('llvm.__version__: %s' % llvm.__version__)
#assert llvm.__version__ == '0.12.0'
Fix buildscript for Python2 on OSX
|
import sys
import platform
import llvm
from llvm.ee import TargetMachine
target = TargetMachine.new()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
print('llvm.__version__: %s' % llvm.__version__)
|
<commit_before>import sys
import platform
import llvm
from llvm.core import Module
from llvm.ee import EngineBuilder
from llvm.utils import check_intrinsics
m = Module.new('fjoidajfa')
eb = EngineBuilder.new(m)
target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
#check_intrinsics.main()
print('llvm.__version__: %s' % llvm.__version__)
#assert llvm.__version__ == '0.12.0'
<commit_msg>Fix buildscript for Python2 on OSX<commit_after>
|
import sys
import platform
import llvm
from llvm.ee import TargetMachine
target = TargetMachine.new()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
print('llvm.__version__: %s' % llvm.__version__)
|
import sys
import platform
import llvm
from llvm.core import Module
from llvm.ee import EngineBuilder
from llvm.utils import check_intrinsics
m = Module.new('fjoidajfa')
eb = EngineBuilder.new(m)
target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
#check_intrinsics.main()
print('llvm.__version__: %s' % llvm.__version__)
#assert llvm.__version__ == '0.12.0'
Fix buildscript for Python2 on OSXimport sys
import platform
import llvm
from llvm.ee import TargetMachine
target = TargetMachine.new()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
print('llvm.__version__: %s' % llvm.__version__)
|
<commit_before>import sys
import platform
import llvm
from llvm.core import Module
from llvm.ee import EngineBuilder
from llvm.utils import check_intrinsics
m = Module.new('fjoidajfa')
eb = EngineBuilder.new(m)
target = eb.select_target()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
#check_intrinsics.main()
print('llvm.__version__: %s' % llvm.__version__)
#assert llvm.__version__ == '0.12.0'
<commit_msg>Fix buildscript for Python2 on OSX<commit_after>import sys
import platform
import llvm
from llvm.ee import TargetMachine
target = TargetMachine.new()
print('target.triple=%r' % target.triple)
if sys.platform == 'darwin':
s = {'64bit': 'x86_64', '32bit': 'x86'}[platform.architecture()[0]]
assert target.triple.startswith(s + '-apple-darwin')
assert llvm.test(verbosity=2, run_isolated=False) == 0
print('llvm.__version__: %s' % llvm.__version__)
|
9021b035cc7bc63603fce3f626ca6c92c0ba3f9b
|
pygraphc/clustering/ConnectedComponents.py
|
pygraphc/clustering/ConnectedComponents.py
|
import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
|
import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
    """Connected-component based clustering of event logs [Studiawan2016a]_.

    References
    ----------
    .. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected
       component detection for authentication log clustering, The 2nd
       International Seminar on Science and Technology, 2016, pp. 495-496.
    """

    def __init__(self, graph):
        """Store the graph to be clustered.

        Parameters
        ----------
        graph : graph
            A graph to be clustered.
        """
        self.graph = graph

    def get_clusters(self):
        """Map every connected component of the graph to a cluster identifier.

        A component is one cluster. The clustering quality depends heavily on
        the cosine similarity threshold used when the graph's edges were built.

        Returns
        -------
        clusters : dict[list]
            Dictionary of cluster list, where each list contains index
            (line number) of event log.
        """
        clusters = {}
        # Assign consecutive ids (0, 1, 2, ...) in component-enumeration order.
        for cid, component in enumerate(nx.connected_components(self.graph)):
            clusters[cid] = component
        # Push the cluster ids back onto the graph nodes via the shared helper.
        ClusterUtility.set_cluster_id(self.graph, clusters)
        return clusters
|
Fix bug in docstring references Studiawan2016a
|
Fix bug in docstring references Studiawan2016a
|
Python
|
mit
|
studiawan/pygraphc
|
import networkx as nx
from ClusterUtility import ClusterUtility
# Connected-component clustering: every component of the graph becomes one cluster.
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
# Keep a reference to the graph that get_clusters() will partition.
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
# Components are enumerated in networkx order; ids are consecutive from 0.
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
# Write the cluster ids back onto the graph nodes via the shared helper.
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
Fix bug in docstring references Studiawan2016a
|
import networkx as nx
from ClusterUtility import ClusterUtility
# Connected-component clustering: every component of the graph becomes one cluster.
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [Studiawan2016a]_.
References
----------
.. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
# Keep a reference to the graph that get_clusters() will partition.
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
# Components are enumerated in networkx order; ids are consecutive from 0.
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
# Write the cluster ids back onto the graph nodes via the shared helper.
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
|
<commit_before>import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
<commit_msg>Fix bug in docstring references Studiawan2016a<commit_after>
|
import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [Studiawan2016a]_.
References
----------
.. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
|
import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
Fix bug in docstring references Studiawan2016aimport networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [Studiawan2016a]_.
References
----------
.. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
|
<commit_before>import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
<commit_msg>Fix bug in docstring references Studiawan2016a<commit_after>import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [Studiawan2016a]_.
References
----------
.. [Studiawan2016a] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
|
c858c4dcf255949cb6ccbf01ac4e5cbf5b2c4bfb
|
pygraphc/clustering/ConnectedComponents.py
|
pygraphc/clustering/ConnectedComponents.py
|
import networkx as nx
class ConnectedComponents:
    """Connected-component detection for clustering event logs [1]_.

    .. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component
       detection for authentication log clustering, in Proceedings of the
       International Seminar on Science and Technology, 2016, pp. 495-496.
    """

    def __init__(self, g):
        """Keep a reference to the graph that will be clustered.

        Parameters
        ----------
        g : graph
            a graph to be clustered
        """
        self.g = g

    def get_clusters(self):
        """Collect the connected components and tag each node with a cluster id.

        A component represents a cluster; ids are consecutive integers
        starting at 0, assigned in component-enumeration order.

        Returns
        -------
        clusters : list[list]
            List of cluster list, where each list contains index
            (line number) of event log.
        """
        # Materialise the components, then label nodes in one enumerate pass
        # (same ids as the original manual counter).
        clusters = list(nx.connected_components(self.g))
        for cluster_id, cluster in enumerate(clusters):
            for node in cluster:
                self.g.node[node]['cluster'] = cluster_id
        return clusters
|
import networkx as nx
# Connected-component clustering: a component of the graph is one cluster,
# and the cluster id is written onto every node of that component.
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
# Keep a reference to the graph that get_clusters() will partition.
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
# Label each node with its cluster's id (consecutive ids starting at 0).
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters
|
Add new line in the end of the file
|
Add new line in the end of the file
|
Python
|
mit
|
studiawan/pygraphc
|
import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clustersAdd new line in the end of the file
|
import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters
|
<commit_before>import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters<commit_msg>Add new line in the end of the file<commit_after>
|
import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters
|
import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clustersAdd new line in the end of the fileimport networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters
|
<commit_before>import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters<commit_msg>Add new line in the end of the file<commit_after>import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters
|
895af0411bc8f45f48265872ccbba9c2a040f7d1
|
ds_graph.py
|
ds_graph.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Skeleton of a graph module: both classes are placeholders awaiting implementation.
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
# Placeholder body; no behaviour implemented yet.
pass
def main():
# Script entry point; intentionally empty for now.
pass
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
    """A single graph vertex.

    It uses a dict to keep track of the vertices which it's connected
    (neighbour -> edge weight).
    """

    def __init__(self, key):
        # A vertex is identified by its key and starts with no neighbours.
        self.id = key
        self.connected_to_dict = {}

    def add_neighnor(self, new_nb, weight=0):
        """Record an edge to ``new_nb`` with the given weight (not implemented yet)."""
        pass

    def __str__(self):
        """Human-readable representation (not implemented yet)."""
        pass

    def get_connections(self):
        """Return the neighbouring vertices (not implemented yet)."""
        pass

    def get_id(self):
        """Return this vertex's key (not implemented yet)."""
        pass

    def get_weight(self, new_nb):
        """Return the weight of the edge to ``new_nb`` (not implemented yet)."""
        pass


class Graph(object):
    """Graph class.

    It contains a dict to map vertex name to vertex objects.
    """
    pass


def main():
    pass


if __name__ == '__main__':
    main()
|
Implement Vertex class’s __init__ and helper functions
|
Implement Vertex class’s __init__ and helper functions
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
Implement Vertex class’s __init__ and helper functions
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
def __init__(self, key):
self.id = key
self.connected_to_dict = {}
def add_neighnor(self, new_nb, weight=0):
pass
def __str__(self):
pass
def get_connections(self):
pass
def get_id(self):
pass
def get_weight(self, new_nb):
pass
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Implement Vertex class’s __init__ and helper functions<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
def __init__(self, key):
self.id = key
self.connected_to_dict = {}
def add_neighnor(self, new_nb, weight=0):
pass
def __str__(self):
pass
def get_connections(self):
pass
def get_id(self):
pass
def get_weight(self, new_nb):
pass
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
Implement Vertex class’s __init__ and helper functionsfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
def __init__(self, key):
self.id = key
self.connected_to_dict = {}
def add_neighnor(self, new_nb, weight=0):
pass
def __str__(self):
pass
def get_connections(self):
pass
def get_id(self):
pass
def get_weight(self, new_nb):
pass
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Implement Vertex class’s __init__ and helper functions<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Vertex(object):
"""Vertex class.
It uses a dict to keep track of the vertices which it's connected.
"""
def __init__(self, key):
self.id = key
self.connected_to_dict = {}
def add_neighnor(self, new_nb, weight=0):
pass
def __str__(self):
pass
def get_connections(self):
pass
def get_id(self):
pass
def get_weight(self, new_nb):
pass
class Graph(object):
"""Graph class.
It contains a dict to map vertex name to vertex objects.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
|
9769f66101a2927ec4fc2f978a8c6401219624ad
|
account_move_fiscal_year/models/account_move_line.py
|
account_move_fiscal_year/models/account_move_line.py
|
# Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
"""Extend account.move.line with the fiscal year of its parent move."""
_inherit = 'account.move.line'
# Fiscal-year date range, related to the parent account.move record.
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
# NOTE(review): store=True denormalises the related value on every move
# line, which is costly on large databases -- confirm it is actually needed.
store=True, copy=False)
|
# Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
"""Extend account.move.line with the fiscal year of its parent move."""
_inherit = 'account.move.line'
# Fiscal-year date range, computed on the fly from the parent account.move
# (non-stored related field).
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
)
|
Remove unneeded and inefficient "store=True"
|
[FIX] Remove unneeded and inefficient "store=True"
|
Python
|
agpl-3.0
|
OCA/account-financial-tools,OCA/account-financial-tools
|
# Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
store=True, copy=False)
[FIX] Remove unneeded and inefficient "store=True"
|
# Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
)
|
<commit_before># Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
store=True, copy=False)
<commit_msg>[FIX] Remove unneeded and inefficient "store=True"<commit_after>
|
# Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
)
|
# Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
store=True, copy=False)
[FIX] Remove unneeded and inefficient "store=True"# Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
)
|
<commit_before># Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
store=True, copy=False)
<commit_msg>[FIX] Remove unneeded and inefficient "store=True"<commit_after># Copyright 2017 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import fields, models
class AccountMoveLine(models.Model):
_inherit = 'account.move.line'
date_range_fy_id = fields.Many2one(
related='move_id.date_range_fy_id',
)
|
7dd723874ac5bae83039b313abd00393636f1d80
|
modernrpc/tests/test_entry_points.py
|
modernrpc/tests/test_entry_points.py
|
# coding: utf-8
import requests
def test_forbidden_get(live_server):
    """The generic RPC entry point rejects GET (405) but serves POST (200)."""
    get_response = requests.get(live_server.url + '/all-rpc/')
    assert get_response.status_code == 405
    post_response = requests.post(live_server.url + '/all-rpc/')
    assert post_response.status_code == 200


def test_allowed_get(live_server):
    """The documentation entry point serves GET (200) but rejects POST (405)."""
    get_response = requests.get(live_server.url + '/all-rpc-doc/')
    assert get_response.status_code == 200
    post_response = requests.post(live_server.url + '/all-rpc-doc/')
    assert post_response.status_code == 405
|
# coding: utf-8
import requests
from django.core.exceptions import ImproperlyConfigured
from pytest import raises
from modernrpc.views import RPCEntryPoint
def test_forbidden_get(live_server):
    """The generic RPC entry point rejects GET (405) but serves POST (200)."""
    get_response = requests.get(live_server.url + '/all-rpc/')
    assert get_response.status_code == 405
    post_response = requests.post(live_server.url + '/all-rpc/')
    assert post_response.status_code == 200


def test_allowed_get(live_server):
    """The documentation entry point serves GET (200) but rejects POST (405)."""
    get_response = requests.get(live_server.url + '/all-rpc-doc/')
    assert get_response.status_code == 200
    post_response = requests.post(live_server.url + '/all-rpc-doc/')
    assert post_response.status_code == 405


def test_invalid_entry_point(settings, rf):
    """With no configured handler, the entry point raises ImproperlyConfigured."""
    settings.MODERNRPC_HANDLERS = []
    view = RPCEntryPoint.as_view()
    with raises(ImproperlyConfigured) as exc_info:
        view(rf.post('xxx'))
    # The error message should point at the missing handler configuration.
    assert 'handler' in str(exc_info.value)
|
Test for bad setting value
|
Test for bad setting value
|
Python
|
mit
|
alorence/django-modern-rpc,alorence/django-modern-rpc
|
# coding: utf-8
import requests
def test_forbidden_get(live_server):
r = requests.get(live_server.url + '/all-rpc/')
assert r.status_code == 405
r2 = requests.post(live_server.url + '/all-rpc/')
assert r2.status_code == 200
def test_allowed_get(live_server):
r = requests.get(live_server.url + '/all-rpc-doc/')
assert r.status_code == 200
r2 = requests.post(live_server.url + '/all-rpc-doc/')
assert r2.status_code == 405
Test for bad setting value
|
# coding: utf-8
import requests
from django.core.exceptions import ImproperlyConfigured
from pytest import raises
from modernrpc.views import RPCEntryPoint
def test_forbidden_get(live_server):
r = requests.get(live_server.url + '/all-rpc/')
assert r.status_code == 405
r2 = requests.post(live_server.url + '/all-rpc/')
assert r2.status_code == 200
def test_allowed_get(live_server):
r = requests.get(live_server.url + '/all-rpc-doc/')
assert r.status_code == 200
r2 = requests.post(live_server.url + '/all-rpc-doc/')
assert r2.status_code == 405
def test_invalid_entry_point(settings, rf):
settings.MODERNRPC_HANDLERS = []
entry_point = RPCEntryPoint.as_view()
with raises(ImproperlyConfigured) as e:
entry_point(rf.post('xxx'))
assert 'handler' in str(e.value)
|
<commit_before># coding: utf-8
import requests
def test_forbidden_get(live_server):
r = requests.get(live_server.url + '/all-rpc/')
assert r.status_code == 405
r2 = requests.post(live_server.url + '/all-rpc/')
assert r2.status_code == 200
def test_allowed_get(live_server):
r = requests.get(live_server.url + '/all-rpc-doc/')
assert r.status_code == 200
r2 = requests.post(live_server.url + '/all-rpc-doc/')
assert r2.status_code == 405
<commit_msg>Test for bad setting value<commit_after>
|
# coding: utf-8
import requests
from django.core.exceptions import ImproperlyConfigured
from pytest import raises
from modernrpc.views import RPCEntryPoint
def test_forbidden_get(live_server):
r = requests.get(live_server.url + '/all-rpc/')
assert r.status_code == 405
r2 = requests.post(live_server.url + '/all-rpc/')
assert r2.status_code == 200
def test_allowed_get(live_server):
r = requests.get(live_server.url + '/all-rpc-doc/')
assert r.status_code == 200
r2 = requests.post(live_server.url + '/all-rpc-doc/')
assert r2.status_code == 405
def test_invalid_entry_point(settings, rf):
settings.MODERNRPC_HANDLERS = []
entry_point = RPCEntryPoint.as_view()
with raises(ImproperlyConfigured) as e:
entry_point(rf.post('xxx'))
assert 'handler' in str(e.value)
|
# coding: utf-8
import requests
def test_forbidden_get(live_server):
r = requests.get(live_server.url + '/all-rpc/')
assert r.status_code == 405
r2 = requests.post(live_server.url + '/all-rpc/')
assert r2.status_code == 200
def test_allowed_get(live_server):
r = requests.get(live_server.url + '/all-rpc-doc/')
assert r.status_code == 200
r2 = requests.post(live_server.url + '/all-rpc-doc/')
assert r2.status_code == 405
Test for bad setting value# coding: utf-8
import requests
from django.core.exceptions import ImproperlyConfigured
from pytest import raises
from modernrpc.views import RPCEntryPoint
def test_forbidden_get(live_server):
r = requests.get(live_server.url + '/all-rpc/')
assert r.status_code == 405
r2 = requests.post(live_server.url + '/all-rpc/')
assert r2.status_code == 200
def test_allowed_get(live_server):
r = requests.get(live_server.url + '/all-rpc-doc/')
assert r.status_code == 200
r2 = requests.post(live_server.url + '/all-rpc-doc/')
assert r2.status_code == 405
def test_invalid_entry_point(settings, rf):
settings.MODERNRPC_HANDLERS = []
entry_point = RPCEntryPoint.as_view()
with raises(ImproperlyConfigured) as e:
entry_point(rf.post('xxx'))
assert 'handler' in str(e.value)
|
<commit_before># coding: utf-8
import requests
def test_forbidden_get(live_server):
r = requests.get(live_server.url + '/all-rpc/')
assert r.status_code == 405
r2 = requests.post(live_server.url + '/all-rpc/')
assert r2.status_code == 200
def test_allowed_get(live_server):
r = requests.get(live_server.url + '/all-rpc-doc/')
assert r.status_code == 200
r2 = requests.post(live_server.url + '/all-rpc-doc/')
assert r2.status_code == 405
<commit_msg>Test for bad setting value<commit_after># coding: utf-8
import requests
from django.core.exceptions import ImproperlyConfigured
from pytest import raises
from modernrpc.views import RPCEntryPoint
def test_forbidden_get(live_server):
r = requests.get(live_server.url + '/all-rpc/')
assert r.status_code == 405
r2 = requests.post(live_server.url + '/all-rpc/')
assert r2.status_code == 200
def test_allowed_get(live_server):
r = requests.get(live_server.url + '/all-rpc-doc/')
assert r.status_code == 200
r2 = requests.post(live_server.url + '/all-rpc-doc/')
assert r2.status_code == 405
def test_invalid_entry_point(settings, rf):
settings.MODERNRPC_HANDLERS = []
entry_point = RPCEntryPoint.as_view()
with raises(ImproperlyConfigured) as e:
entry_point(rf.post('xxx'))
assert 'handler' in str(e.value)
|
9cc7b71d69a10f9e166d7264366669ac8ec4e212
|
cochlear/__init__.py
|
cochlear/__init__.py
|
import logging.config
def configure_logging(filename):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
# This is what gets saved to the file
'file': {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
logging.config.dictConfig(logging_config)
|
import logging.config
def configure_logging(filename=None):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
if filename is not None:
logging_config['handlers']['file'] = {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
logging_config['root']['handlers'].append('file')
logging.config.dictConfig(logging_config)
|
Allow logging to be config w/o filename
|
ENH: Allow logging to be config w/o filename
|
Python
|
bsd-3-clause
|
bburan/cochlear
|
import logging.config
def configure_logging(filename):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
# This is what gets saved to the file
'file': {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
logging.config.dictConfig(logging_config)
ENH: Allow logging to be config w/o filename
|
import logging.config
def configure_logging(filename=None):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
if filename is not None:
logging_config['handlers']['file'] = {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
logging_config['root']['handlers'].append('file')
logging.config.dictConfig(logging_config)
|
<commit_before>import logging.config
def configure_logging(filename):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
# This is what gets saved to the file
'file': {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
logging.config.dictConfig(logging_config)
<commit_msg>ENH: Allow logging to be config w/o filename<commit_after>
|
import logging.config
def configure_logging(filename=None):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
if filename is not None:
logging_config['handlers']['file'] = {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
logging_config['root']['handlers'].append('file')
logging.config.dictConfig(logging_config)
|
import logging.config
def configure_logging(filename):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
# This is what gets saved to the file
'file': {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
logging.config.dictConfig(logging_config)
ENH: Allow logging to be config w/o filenameimport logging.config
def configure_logging(filename=None):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
if filename is not None:
logging_config['handlers']['file'] = {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
logging_config['root']['handlers'].append('file')
logging.config.dictConfig(logging_config)
|
<commit_before>import logging.config
def configure_logging(filename):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
# This is what gets saved to the file
'file': {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
logging.config.dictConfig(logging_config)
<commit_msg>ENH: Allow logging to be config w/o filename<commit_after>import logging.config
def configure_logging(filename=None):
time_format = '[%(asctime)s] :: %(name)s - %(levelname)s - %(message)s'
simple_format = '%(name)s - %(message)s'
logging_config = {
'version': 1,
'formatters': {
'time': {'format': time_format},
'simple': {'format': simple_format},
},
'handlers': {
# This is what gets printed out to the console
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
'level': 'DEBUG',
},
},
'loggers': {
'__main__': {'level': 'DEBUG'},
'cochlear': {'level': 'DEBUG'},
'cochlear.nidaqmx': {'level': 'ERROR'},
},
'root': {
'handlers': ['console'],
},
}
if filename is not None:
logging_config['handlers']['file'] = {
'class': 'logging.FileHandler',
'formatter': 'time',
'filename': filename,
'level': 'DEBUG',
}
logging_config['root']['handlers'].append('file')
logging.config.dictConfig(logging_config)
|
bf312434a9b52264dc63667c986ff353d0379e5b
|
cogs/utils/dataIO.py
|
cogs/utils/dataIO.py
|
import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
|
import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop, daemon=True)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
|
Make threads run in daemon mode
|
Make threads run in daemon mode
|
Python
|
mit
|
Thessia/Liara
|
import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
Make threads run in daemon mode
|
import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop, daemon=True)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
|
<commit_before>import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
<commit_msg>Make threads run in daemon mode<commit_after>
|
import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop, daemon=True)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
|
import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
Make threads run in daemon modeimport redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop, daemon=True)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
|
<commit_before>import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
<commit_msg>Make threads run in daemon mode<commit_after>import redis_collections
import threading
import time
import __main__
class RedisDict(redis_collections.Dict):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.die = False
self.thread = threading.Thread(target=self.update_loop, daemon=True)
self.thread.start()
self.prev = None
def update_loop(self):
time.sleep(2)
while not (__main__.liara.stopped or self.die):
if self.prev != repr(self):
self.prev = repr(self)
self.sync()
time.sleep(0.1)
else:
self.cache.clear()
time.sleep(0.1)
class dataIO:
@staticmethod
def save_json(filename, content):
pass # "oops"
@staticmethod
def load_json(filename):
return RedisDict(key=filename, redis=__main__.redis_conn, writeback=True)
|
950b18ed6a4eaabd99ec6ce769247fc84676eb3b
|
tests.py
|
tests.py
|
#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY:" 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
|
#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY": 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
|
Fix misplaced colon in test suite
|
Fix misplaced colon in test suite
|
Python
|
mit
|
hawkw/sqlviz
|
#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY:" 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
Fix misplaced colon in test suite
|
#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY": 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
|
<commit_before>#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY:" 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix misplaced colon in test suite<commit_after>
|
#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY": 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
|
#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY:" 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
Fix misplaced colon in test suite#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY": 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
|
<commit_before>#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY:" 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix misplaced colon in test suite<commit_after>#! usr/bin/env python3
import unittest
from sqlviz import Schema
# Tests will go here...eventually
class InventorySchemaSpec (unittest.TestCase):
def setUp (self):
self.schema = Schema(
"""DROP TABLE Inventory;
CREATE TABLE Inventory
(
id INT PRIMARY KEY,
product VARCHAR(50) UNIQUE,
quantity INT,
price DECIMAL(18,2)
);""")
def test_n_tables(self):
self.assertEqual(self.schema.n_tables(), 1,
"The Inventory schema should contain 1 table.")
def test_n_keys(self):
self.assertEqual(self.schema.n_keys(), {"PRIMARY KEY": 1, "FOREIGN KEY": 0},
"The Inventory schema should contain 1 primary key and 0 foreign keys.")
def test_n_datatypes(self):
self.assertEqual(self.schema.n_datatypes(), {"INT": 2, "VARCHAR": 1, "DECIMAL": 1},
"The Inventory schema should contain two INTs, one VARCHAR, and one DECIMAL")
def lengths(self):
self.assertEqual(self.schema.lengths(), {"VARCHAR": [50], "DECIMAL": [(18,2)]},
"The Inventory schema should contain one VARCHAR(50) and one DECIMAL(18,2)")
if __name__ == '__main__':
unittest.main()
|
ad6b7fe871be502220de5bcb6c2a65f4e7999294
|
etcd3/client.py
|
etcd3/client.py
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
Add compact and delete stubs
|
Add compact and delete stubs
|
Python
|
apache-2.0
|
kragniz/python-etcd3
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
Add compact and delete stubs
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
<commit_before>import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
<commit_msg>Add compact and delete stubs<commit_after>
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
Add compact and delete stubsimport grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
<commit_before>import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
<commit_msg>Add compact and delete stubs<commit_after>import grpc
from etcd3.etcdrpc import rpc_pb2 as etcdrpc
import etcd3.exceptions as exceptions
class Etcd3Client(object):
def __init__(self, host='localhost', port=2379):
self.channel = grpc.insecure_channel('{host}:{port}'.format(
host=host, port=port)
)
self.kvstub = etcdrpc.KVStub(self.channel)
def get(self, key):
'''
Get the value of a key from etcd.
'''
range_request = etcdrpc.RangeRequest()
range_request.key = key.encode('utf-8')
range_response = self.kvstub.Range(range_request)
if range_response.count < 1:
raise exceptions.KeyNotFoundError(
'the key "{}" was not found'.format(key))
else:
# smells funny - there must be a cleaner way to get the value?
return range_response.kvs.pop().value
def put(self, key, value):
'''
Save a value to etcd.
'''
put_request = etcdrpc.PutRequest()
put_request.key = key.encode('utf-8')
put_request.value = value.encode('utf-8')
self.kvstub.Put(put_request)
def delete(self, key):
pass
def compact(self):
pass
def client():
'''Return an instance of an Etcd3Client'''
return Etcd3Client(host='localhost', port=2379)
|
a4a58384733abd23e5e9074d6c181c112c909f2d
|
tosp_auth/tests.py
|
tosp_auth/tests.py
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
|
Delete whitespace at line 7
|
Delete whitespace at line 7
Delete a white space for travis
|
Python
|
mit
|
erikiado/jp2_online,erikiado/jp2_online,erikiado/jp2_online
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
Delete whitespace at line 7
Delete a white space for travis
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
|
<commit_before>from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
<commit_msg>Delete whitespace at line 7
Delete a white space for travis<commit_after>
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
|
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
Delete whitespace at line 7
Delete a white space for travisfrom django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
|
<commit_before>from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
<commit_msg>Delete whitespace at line 7
Delete a white space for travis<commit_after>from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
class ControlerLogoutTest(TestCase):
"""Unit test suite for testing the controler of
Logout in the app: tosp_auth.
Test that if the functionality of logout is correct.
"""
def setUp(self):
"""Initialize the browser and create a user, before running the tests.
"""
User.objects.create_user(
username='thelma', email='juan@pablo.com', password='junipero')
def test_logout_does_not_do_that(self):
"""Verify if the Logout works.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Log Out")
self.client.logout()
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "login")
def test_expected_url(self):
"""Verify if redirect to the right url.
"""
self.client.login(username='thelma', password='junipero')
response = self.client.get(reverse('home'))
self.assertEqual(response.status_code, 200)
self.client.logout()
|
5e61515eb6d07004ae2d3eb8f8d7ffe59b351a8c
|
migrations/versions/0003_create_tokens.py
|
migrations/versions/0003_create_tokens.py
|
"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0001_initialise_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
|
"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0002_add_templates'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
|
Fix the migration script so that the down revision is pointing to the migration scripts in the merge
|
Fix the migration script so that the down revision is pointing to the migration scripts in the merge
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0001_initialise_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
Fix the migration script so that the down revision is pointing to the migration scripts in the merge
|
"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0002_add_templates'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
|
<commit_before>"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0001_initialise_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
<commit_msg>Fix the migration script so that the down revision is pointing to the migration scripts in the merge<commit_after>
|
"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0002_add_templates'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
|
"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0001_initialise_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
Fix the migration script so that the down revision is pointing to the migration scripts in the merge"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0002_add_templates'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
|
<commit_before>"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0001_initialise_data'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
<commit_msg>Fix the migration script so that the down revision is pointing to the migration scripts in the merge<commit_after>"""empty message
Revision ID: 0003_create_tokens
Revises: 0001_initialise_data
Create Date: 2016-01-13 17:07:49.061776
"""
# revision identifiers, used by Alembic.
revision = '0003_create_tokens'
down_revision = '0002_add_templates'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('expiry_date', sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_index(op.f('ix_tokens_service_id'), 'tokens', ['service_id'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tokens_service_id'), table_name='tokens')
op.drop_table('tokens')
### end Alembic commands ###
|
6ae61fe99c6ab98b866a8ecf28a5503febc697d6
|
pypuppetdbquery/__init__.py
|
pypuppetdbquery/__init__.py
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .evaluator import Evaluator
from .parser import Parser
def parse(s, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
return evaluator.evaluate(ast)
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from json import dumps as json_dumps
from .evaluator import Evaluator
from .parser import Parser
def parse(s, json=True, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
raw = evaluator.evaluate(ast)
if json and raw is not None:
return json_dumps(raw)
else:
return raw
|
Return JSON from pypuppetdbquery.parse() by default
|
Return JSON from pypuppetdbquery.parse() by default
This will be the most useful mode of operation when combined with
pypuppetdb, so make life easy for people.
|
Python
|
apache-2.0
|
bootc/pypuppetdbquery
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .evaluator import Evaluator
from .parser import Parser
def parse(s, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
return evaluator.evaluate(ast)
Return JSON from pypuppetdbquery.parse() by default
This will be the most useful mode of operation when combined with
pypuppetdb, so make life easy for people.
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from json import dumps as json_dumps
from .evaluator import Evaluator
from .parser import Parser
def parse(s, json=True, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
raw = evaluator.evaluate(ast)
if json and raw is not None:
return json_dumps(raw)
else:
return raw
|
<commit_before># -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .evaluator import Evaluator
from .parser import Parser
def parse(s, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
return evaluator.evaluate(ast)
<commit_msg>Return JSON from pypuppetdbquery.parse() by default
This will be the most useful mode of operation when combined with
pypuppetdb, so make life easy for people.<commit_after>
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from json import dumps as json_dumps
from .evaluator import Evaluator
from .parser import Parser
def parse(s, json=True, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
raw = evaluator.evaluate(ast)
if json and raw is not None:
return json_dumps(raw)
else:
return raw
|
# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .evaluator import Evaluator
from .parser import Parser
def parse(s, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
return evaluator.evaluate(ast)
Return JSON from pypuppetdbquery.parse() by default
This will be the most useful mode of operation when combined with
pypuppetdb, so make life easy for people.# -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from json import dumps as json_dumps
from .evaluator import Evaluator
from .parser import Parser
def parse(s, json=True, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
raw = evaluator.evaluate(ast)
if json and raw is not None:
return json_dumps(raw)
else:
return raw
|
<commit_before># -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .evaluator import Evaluator
from .parser import Parser
def parse(s, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
return evaluator.evaluate(ast)
<commit_msg>Return JSON from pypuppetdbquery.parse() by default
This will be the most useful mode of operation when combined with
pypuppetdb, so make life easy for people.<commit_after># -*- coding: utf-8 -*-
#
# This file is part of pypuppetdbquery.
# Copyright © 2016 Chris Boot <bootc@bootc.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from json import dumps as json_dumps
from .evaluator import Evaluator
from .parser import Parser
def parse(s, json=True, parser_opts=None, evaluator_opts=None):
parser = Parser(**(parser_opts or {}))
evaluator = Evaluator(**(evaluator_opts or {}))
ast = parser.parse(s)
raw = evaluator.evaluate(ast)
if json and raw is not None:
return json_dumps(raw)
else:
return raw
|
4c240f17571b5e63805a2632e5e8a6c1d3695d54
|
examples/00-load/create-tri-surface.py
|
examples/00-load/create-tri-surface.py
|
"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
cloud.plot()
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
vtki.set_plot_theme('doc')
cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
Increase point size in example
|
Increase point size in example
|
Python
|
mit
|
akaszynski/vtkInterface
|
"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
cloud.plot()
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
Increase point size in example
|
"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
vtki.set_plot_theme('doc')
cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
<commit_before>"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
cloud.plot()
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
<commit_msg>Increase point size in example<commit_after>
|
"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
vtki.set_plot_theme('doc')
cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
cloud.plot()
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
Increase point size in example"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
vtki.set_plot_theme('doc')
cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
<commit_before>"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
cloud.plot()
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
<commit_msg>Increase point size in example<commit_after>"""
Create Triangulated Surface
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Create a surface from a set of points through a Delaunay triangulation.
"""
# sphinx_gallery_thumbnail_number = 2
import vtki
import numpy as np
################################################################################
# First, create some points for the surface.
# Define a simple Gaussian surface
xx, yy = np.meshgrid(np.linspace(-200,200,20), np.linspace(-200,200,20))
A, b = 100, 100
zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
# Get the points as a 2D NumPy array (N by 3)
points = np.c_[xx.reshape(-1), yy.reshape(-1), zz.reshape(-1)]
print(points[0:5, :])
################################################################################
# Now use those points to create a point cloud ``vtki`` data object. This will
# be encompassed in a :class:`vtki.PolyData` object.
# simply pass the numpy points to the PolyData constructor
cloud = vtki.PolyData(points)
vtki.set_plot_theme('doc')
cloud.plot(point_size=15, use_panel=False)
################################################################################
# Now that we have a ``vtki`` data structure of the points, we can perform a
# triangulation to turn those boring discrete points into a connected surface.
surf = cloud.delaunay_2d()
surf.plot(show_edges=True)
|
5437d5135213fec4390d208174cda1e5c1a57674
|
manager/context_processor.py
|
manager/context_processor.py
|
# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META['HTTP_DNT']
}
|
# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META.get('HTTP_DNT', False)
}
|
Set a default if the browser doesn't send the DNT header
|
Set a default if the browser doesn't send the DNT header
|
Python
|
agpl-3.0
|
wger-project/wger,wger-project/wger,DeveloperMal/wger,kjagoo/wger_stark,wger-project/wger,rolandgeider/wger,DeveloperMal/wger,petervanderdoes/wger,DeveloperMal/wger,DeveloperMal/wger,kjagoo/wger_stark,rolandgeider/wger,rolandgeider/wger,wger-project/wger,petervanderdoes/wger,rolandgeider/wger,petervanderdoes/wger,petervanderdoes/wger,kjagoo/wger_stark,kjagoo/wger_stark
|
# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META['HTTP_DNT']
}
Set a default if the browser doesn't send the DNT header
|
# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META.get('HTTP_DNT', False)
}
|
<commit_before># This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META['HTTP_DNT']
}
<commit_msg>Set a default if the browser doesn't send the DNT header<commit_after>
|
# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META.get('HTTP_DNT', False)
}
|
# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META['HTTP_DNT']
}
Set a default if the browser doesn't send the DNT header# This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META.get('HTTP_DNT', False)
}
|
<commit_before># This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META['HTTP_DNT']
}
<commit_msg>Set a default if the browser doesn't send the DNT header<commit_after># This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Workout Manager. If not, see <http://www.gnu.org/licenses/>.
from workout_manager import get_version
def processor(request):
return {
# Application version
'version' : get_version(),
# Do not track header
'DNT': request.META.get('HTTP_DNT', False)
}
|
e41867e8d36803f22f8a64db65bd1bf5c04315c8
|
mediachain/translation/lookup.py
|
mediachain/translation/lookup.py
|
from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
try:
return load_translator(path, name, version)
except ImportError:
pass
ipfs = get_ipfs_datastore() # FIXME: memoize this
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
return load_translator(path, name, version)
def load_translator(base_path, name, version):
if base_path not in sys.path:
sys.path.append(base_path)
module_path = version + '.translator'
translator_module = __import__(module_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
|
from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
ipfs = get_ipfs_datastore() # FIXME: memoize this
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
sys.path.append(path)
# print('dynamic module load path: {}'.format(path))
full_path = version + '.translator'
# print('loading translator module from {}'.format(full_path))
translator_module = __import__(full_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
|
Revert "only fetch translator from ipfs if it doesn't exist locally from prev run"
|
Revert "only fetch translator from ipfs if it doesn't exist locally from prev run"
- IPFS cache has these on the fs already
This reverts commit a77349df4b80e4b3cd037c518fc9c69f784072a0.
|
Python
|
mit
|
mediachain/mediachain-client,mediachain/mediachain-client
|
from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
try:
return load_translator(path, name, version)
except ImportError:
pass
ipfs = get_ipfs_datastore() # FIXME: memoize this
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
return load_translator(path, name, version)
def load_translator(base_path, name, version):
if base_path not in sys.path:
sys.path.append(base_path)
module_path = version + '.translator'
translator_module = __import__(module_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
Revert "only fetch translator from ipfs if it doesn't exist locally from prev run"
- IPFS cache has these on the fs already
This reverts commit a77349df4b80e4b3cd037c518fc9c69f784072a0.
|
from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
ipfs = get_ipfs_datastore() # FIXME: memoize this
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
sys.path.append(path)
# print('dynamic module load path: {}'.format(path))
full_path = version + '.translator'
# print('loading translator module from {}'.format(full_path))
translator_module = __import__(full_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
|
<commit_before>from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
try:
return load_translator(path, name, version)
except ImportError:
pass
ipfs = get_ipfs_datastore() # FIXME: memoize this
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
return load_translator(path, name, version)
def load_translator(base_path, name, version):
if base_path not in sys.path:
sys.path.append(base_path)
module_path = version + '.translator'
translator_module = __import__(module_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
<commit_msg>Revert "only fetch translator from ipfs if it doesn't exist locally from prev run"
- IPFS cache has these on the fs already
This reverts commit a77349df4b80e4b3cd037c518fc9c69f784072a0.<commit_after>
|
from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
ipfs = get_ipfs_datastore() # FIXME: memoize this
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
sys.path.append(path)
# print('dynamic module load path: {}'.format(path))
full_path = version + '.translator'
# print('loading translator module from {}'.format(full_path))
translator_module = __import__(full_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
|
from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
try:
return load_translator(path, name, version)
except ImportError:
pass
ipfs = get_ipfs_datastore() # FIXME: memoize this
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
return load_translator(path, name, version)
def load_translator(base_path, name, version):
if base_path not in sys.path:
sys.path.append(base_path)
module_path = version + '.translator'
translator_module = __import__(module_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
Revert "only fetch translator from ipfs if it doesn't exist locally from prev run"
- IPFS cache has these on the fs already
This reverts commit a77349df4b80e4b3cd037c518fc9c69f784072a0.from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
ipfs = get_ipfs_datastore() # FIXME: memoize this
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
sys.path.append(path)
# print('dynamic module load path: {}'.format(path))
full_path = version + '.translator'
# print('loading translator module from {}'.format(full_path))
translator_module = __import__(full_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
|
<commit_before>from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
try:
return load_translator(path, name, version)
except ImportError:
pass
ipfs = get_ipfs_datastore() # FIXME: memoize this
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
return load_translator(path, name, version)
def load_translator(base_path, name, version):
if base_path not in sys.path:
sys.path.append(base_path)
module_path = version + '.translator'
translator_module = __import__(module_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
<commit_msg>Revert "only fetch translator from ipfs if it doesn't exist locally from prev run"
- IPFS cache has these on the fs already
This reverts commit a77349df4b80e4b3cd037c518fc9c69f784072a0.<commit_after>from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join
class ChDir(object):
"""
Step into a directory temporarily
"""
def __init__(self, path):
self.old_dir = os.getcwd()
self.new_dir = path
def __enter__(self):
os.chdir(self.new_dir)
def __exit__(self, *args):
os.chdir(self.old_dir)
def get_translator(translator_id):
try:
name, version = translator_id.split('@')
except ValueError:
raise LookupError(
"Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
)
ipfs = get_ipfs_datastore() # FIXME: memoize this
path = join(expanduser('~'), '.mediachain')
if not os.path.exists(path):
os.makedirs(path)
with ChDir(path):
translator = ipfs.client.get(version) # FIXME: timeout, error handling
sys.path.append(path)
# print('dynamic module load path: {}'.format(path))
full_path = version + '.translator'
# print('loading translator module from {}'.format(full_path))
translator_module = __import__(full_path, globals(), locals(), [name])
translator = getattr(translator_module, name.capitalize())
return translator
|
d8d3f01bf9fdbae8b5eed05d44b5e811c1af3de4
|
billjobs/urls.py
|
billjobs/urls.py
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
from django.conf.urls import url, include
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^users/$', views.UserAdmin.as_view(), name='users'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(), name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
Remove rest_framework routers, add urlpattern for users api
|
Remove rest_framework routers, add urlpattern for users api
|
Python
|
mit
|
ioO/billjobs
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
Remove rest_framework routers, add urlpattern for users api
|
from django.conf.urls import url, include
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^users/$', views.UserAdmin.as_view(), name='users'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(), name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
<commit_before>from django.conf.urls import url, include
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
<commit_msg>Remove rest_framework routers, add urlpattern for users api<commit_after>
|
from django.conf.urls import url, include
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^users/$', views.UserAdmin.as_view(), name='users'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(), name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
from django.conf.urls import url, include
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
Remove rest_framework routers, add urlpattern for users apifrom django.conf.urls import url, include
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^users/$', views.UserAdmin.as_view(), name='users'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(), name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
<commit_before>from django.conf.urls import url, include
from rest_framework import routers
from . import views
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
<commit_msg>Remove rest_framework routers, add urlpattern for users api<commit_after>from django.conf.urls import url, include
from . import views
urlpatterns = [
url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf,
name='generate-pdf'),
url(r'^users/$', views.UserAdmin.as_view(), name='users'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserAdminDetail.as_view(), name='user-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
88fa9a0841f7f7774b57b9d8731fb7334d799259
|
formidable/json_migrations/__init__.py
|
formidable/json_migrations/__init__.py
|
import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
|
import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
"""
Return a generator with all JSON migrations sorted.
Each item is a tuple with:
- the version number (int)
- the label of the migration
- the reference to the migrate() function
"""
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
"""
Apply all migrations from ``version_src`` to the latest found on
``data``.
"""
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
|
Add docstrings to JSON migrations functions
|
Add docstrings to JSON migrations functions
|
Python
|
mit
|
novafloss/django-formidable
|
import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
Add docstrings to JSON migrations functions
|
import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
"""
Return a generator with all JSON migrations sorted.
Each item is a tuple with:
- the version number (int)
- the label of the migration
- the reference to the migrate() function
"""
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
"""
Apply all migrations from ``version_src`` to the latest found on
``data``.
"""
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
|
<commit_before>import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
<commit_msg>Add docstrings to JSON migrations functions<commit_after>
|
import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
"""
Return a generator with all JSON migrations sorted.
Each item is a tuple with:
- the version number (int)
- the label of the migration
- the reference to the migrate() function
"""
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
"""
Apply all migrations from ``version_src`` to the latest found on
``data``.
"""
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
|
import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
Add docstrings to JSON migrations functionsimport os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
"""
Return a generator with all JSON migrations sorted.
Each item is a tuple with:
- the version number (int)
- the label of the migration
- the reference to the migrate() function
"""
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
"""
Apply all migrations from ``version_src`` to the latest found on
``data``.
"""
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
|
<commit_before>import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
<commit_msg>Add docstrings to JSON migrations functions<commit_after>import os
import sys
from glob import glob
from importlib import import_module
__all__ = ['migrate', 'get_migrations']
HERE = os.path.dirname(__file__)
package = sys.modules[__name__].__name__
def get_migrations():
"""
Return a generator with all JSON migrations sorted.
Each item is a tuple with:
- the version number (int)
- the label of the migration
- the reference to the migrate() function
"""
for module in sorted(glob(os.path.join(HERE, '[0-9]*.py'))):
module_name, _ = os.path.basename(module).rsplit('.', 1)
mod = import_module('.' + module_name, package=package)
version, label = module_name.split('_', 1)
yield int(version), label, mod.migrate
def migrate(data, version_src=0):
"""
Apply all migrations from ``version_src`` to the latest found on
``data``.
"""
for version, label, func in list(get_migrations()):
if version_src < version:
data = func(data)
version_src = version
return data
|
04b64f521763ed9cc55103455a93ed51656868a0
|
core/loaders/news.py
|
core/loaders/news.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'media/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'news/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
|
Change default value for folder parameter.
|
Change default value for folder parameter.
|
Python
|
mit
|
HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core,HIIT/hybra-core
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'media/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
Change default value for folder parameter.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'news/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'media/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
<commit_msg>Change default value for folder parameter.<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'news/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'media/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
Change default value for folder parameter.#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'news/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'media/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
<commit_msg>Change default value for folder parameter.<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import json
import os
import sys
import re
import hashlib
from datetime import datetime, timedelta
import common
def load( terms = ['.json'], data_dir = '', folder = 'news/' ):
import dateparser
path = data_dir + folder
for dirpath, subdirs, files in os.walk(path):
for f in files:
f = os.path.join( dirpath, f )
if any( term in f for term in terms ):
for d in json.load( open( f ) ):
common_data_keys = {'_author' : 'creator',
'_title' : 'text_content',
'_ingress' : 'text_content',
'_text' : 'text_content',
'_url' : 'url',
'_domain' : 'source_detail'}
d = common.__init_harmonize_data( d, 'news_media', common_data_keys )
## ensure data is always in a list
if isinstance( d['_datetime_list'] , str) or isinstance( d['_datetime_list'] , unicode):
d['_datetime_list'] = [ d['_datetime_list'] ]
try:
d['timestamp'] = dateparser.parse( min( d['_datetime_list'] ), ) ## should take care of the various formats
except Exception, e:
d['broken']['_datetime_list'] = e
d['images'] = d['_images']
d = common.__post_harmonize_data( d )
yield d
|
43ad2ecd572e85b5c9f92025b0826e88770fc33c
|
moscowdjango/settings_staging.py
|
moscowdjango/settings_staging.py
|
# Django settings for moscowdjango project.
from .settings import *
DEBUG = True
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Django compressor
COMPRESS_ENABLED = False
|
# Django settings for moscowdjango project.
from .settings import *
DEBUG = os.environ.get('DEBUG', False)
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
# Django compressor
COMPRESS_ENABLED = False
|
Make DEBUG configurable on staging
|
Make DEBUG configurable on staging
|
Python
|
bsd-3-clause
|
moscowpython/moscowpython,VladimirFilonov/moscowdjango,moscowdjango/moscowdjango,VladimirFilonov/moscowdjango,moscowpython/moscowpython,moscowdjango/moscowdjango,moscowdjango/moscowdjango,VladimirFilonov/moscowdjango,moscowpython/moscowpython
|
# Django settings for moscowdjango project.
from .settings import *
DEBUG = True
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Django compressor
COMPRESS_ENABLED = False
Make DEBUG configurable on staging
|
# Django settings for moscowdjango project.
from .settings import *
DEBUG = os.environ.get('DEBUG', False)
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
# Django compressor
COMPRESS_ENABLED = False
|
<commit_before># Django settings for moscowdjango project.
from .settings import *
DEBUG = True
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Django compressor
COMPRESS_ENABLED = False
<commit_msg>Make DEBUG configurable on staging<commit_after>
|
# Django settings for moscowdjango project.
from .settings import *
DEBUG = os.environ.get('DEBUG', False)
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
# Django compressor
COMPRESS_ENABLED = False
|
# Django settings for moscowdjango project.
from .settings import *
DEBUG = True
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Django compressor
COMPRESS_ENABLED = False
Make DEBUG configurable on staging# Django settings for moscowdjango project.
from .settings import *
DEBUG = os.environ.get('DEBUG', False)
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
# Django compressor
COMPRESS_ENABLED = False
|
<commit_before># Django settings for moscowdjango project.
from .settings import *
DEBUG = True
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# Django compressor
COMPRESS_ENABLED = False
<commit_msg>Make DEBUG configurable on staging<commit_after># Django settings for moscowdjango project.
from .settings import *
DEBUG = os.environ.get('DEBUG', False)
EMBEDLY_KEY = os.environ.get('EMBEDLY_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
# Amazon credentials
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = 'moscowdjango-staging'
AWS_QUERYSTRING_AUTH = False
AWS_CALLING_FORMAT = 2 # SUBDOMAIN
AWS_S3_SECURE_URLS = True
# Media & static
DEFAULT_FILE_STORAGE = 'moscowdjango.amazon.DefaultStorage'
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
DEFAULT_S3_PATH = "media"
MEDIA_ROOT = '/%s/' % DEFAULT_S3_PATH
MEDIA_URL = 'https://%s.s3.amazonaws.com/media/' % AWS_STORAGE_BUCKET_NAME
# Django compressor
COMPRESS_ENABLED = False
|
35b5215cd16493fea00c7ebb2106c633ce4c6a9b
|
qutebrowser/config.py
|
qutebrowser/config.py
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
Use Arial as Fantasy font
|
qutebrowser: Use Arial as Fantasy font
|
Python
|
mit
|
The-Compiler/dotfiles,The-Compiler/dotfiles,The-Compiler/dotfiles
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
qutebrowser: Use Arial as Fantasy font
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
<commit_before>config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
<commit_msg>qutebrowser: Use Arial as Fantasy font<commit_after>
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
qutebrowser: Use Arial as Fantasy fontconfig.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
<commit_before>config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
<commit_msg>qutebrowser: Use Arial as Fantasy font<commit_after>config.load_autoconfig()
c.tabs.background = True
c.new_instance_open_target = 'window'
c.downloads.position = 'bottom'
c.spellcheck.languages = ['en-US']
config.bind(',ce', 'config-edit')
config.bind(',p', 'config-cycle -p content.plugins ;; reload')
config.bind(',rta', 'open {url}top/?sort=top&t=all')
config.bind(',rtv', 'spawn termite -e "rtv {url}"')
config.bind(',c', 'spawn -d chromium {url}')
css = '~/code/solarized-everything-css/css/solarized-all-sites-dark.css'
config.bind(',n', f'config-cycle content.user_stylesheets {css} ""')
c.url.searchengines['rfc'] = 'https://tools.ietf.org/html/rfc{}'
#c.url.searchengines['maps'] = 'https://www.google.com/maps?q=%s'
c.fonts.tabs = '8pt monospace'
c.fonts.statusbar = '8pt monospace'
c.fonts.web.family.fantasy = 'Arial'
c.search.incremental = False
c.editor.command = ['emacs', '{}']
c.qt.args = ['ppapi-widevine-path=/usr/lib/qt/plugins/ppapi/libwidevinecdmadapter.so']
c.content.javascript.enabled = False
|
467c1fc9e56ac6d6adc9e82cb546951d1ce5fdcd
|
grammpy/Rule.py
|
grammpy/Rule.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
Add __active property to rule
|
Add __active property to rule
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
Add __active property to rule
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
<commit_msg>Add __active property to rule<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
Add __active property to rule#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
<commit_msg>Add __active property to rule<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .Constants import EPSILON
class Rule:
#TODO rules -> rule -> left/right -> rules
right = [EPSILON]
left = [EPSILON]
rule = ([EPSILON], [EPSILON])
rules = [([EPSILON], [EPSILON])]
__active = True
@staticmethod
def is_regular():
return False
@staticmethod
def is_contextfree():
return False
@staticmethod
def is_context():
return False
@staticmethod
def is_unrestricted():
return False
|
ebe1fed581c7a2eeab6dc7c4f6304e7aa634e942
|
regenesis/database.py
|
regenesis/database.py
|
import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
for fact in cube.facts:
sl.upsert(engine, fact_table,
fact.to_row(), ['fact_id'])
|
import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
    """Persist *cube* and all of its metadata into the database.

    Bails out immediately if a row with the cube's name already exists
    in the ``cube`` table; otherwise upserts the cube, its statistic,
    dimensions, dimension values and references, then wipes and
    bulk-loads the facts into a per-cube ``fact_<name>`` table.

    :param cube: cube object exposing ``name``, ``metadata``,
        ``dimensions``, ``references``, ``facts`` and ``to_row()``
        (interface inferred from usage here -- confirm against the
        cube implementation)
    """
    cube_table = sl.get_table(engine, 'cube')
    # Cube already loaded once: treat it as immutable and skip the
    # whole (expensive) load instead of re-upserting everything.
    if sl.find_one(engine, cube_table, name=cube.name):
        return
    sl.upsert(engine, cube_table, cube.to_row(), ['name'])
    statistic_table = sl.get_table(engine, 'statistic')
    sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
    dimension_table = sl.get_table(engine, 'dimension')
    value_table = sl.get_table(engine, 'value')
    for dimension in cube.dimensions.values():
        sl.upsert(engine, dimension_table,
                  dimension.to_row(), ['name'])
        for value in dimension.values:
            sl.upsert(engine, value_table,
                      value.to_row(), ['value_id'])
    reference_table = sl.get_table(engine, 'reference')
    # References tie this cube to the dimensions it uses.
    for reference in cube.references:
        sl.upsert(engine, reference_table,
                  reference.to_row(), ['cube_name', 'dimension_name'])
    fact_table = sl.get_table(engine, 'fact_' + cube.name)
    # Delete-then-add is much faster than per-row upserts for a bulk
    # fact load: no key lookup per row.
    sl.delete(engine, fact_table)
    for i, fact in enumerate(cube.facts):
        sl.add_row(engine, fact_table, fact.to_row())
        if i and i % 1000 == 0:
            # Progress heartbeat every 1000 rows.
            log.info("Loaded: %s rows", i)
|
Speed up loading, don't update for now.
|
Speed up loading, don't update for now.
|
Python
|
mit
|
pudo/regenesis,pudo/regenesis
|
import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
for fact in cube.facts:
sl.upsert(engine, fact_table,
fact.to_row(), ['fact_id'])
Speed up loading, don't update for now.
|
import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
if sl.find_one(engine, cube_table, name=cube.name):
return
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
sl.delete(engine, fact_table)
for i, fact in enumerate(cube.facts):
sl.add_row(engine, fact_table, fact.to_row())
if i and i % 1000 == 0:
log.info("Loaded: %s rows", i)
|
<commit_before>import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
for fact in cube.facts:
sl.upsert(engine, fact_table,
fact.to_row(), ['fact_id'])
<commit_msg>Speed up loading, don't update for now. <commit_after>
|
import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
if sl.find_one(engine, cube_table, name=cube.name):
return
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
sl.delete(engine, fact_table)
for i, fact in enumerate(cube.facts):
sl.add_row(engine, fact_table, fact.to_row())
if i and i % 1000 == 0:
log.info("Loaded: %s rows", i)
|
import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
for fact in cube.facts:
sl.upsert(engine, fact_table,
fact.to_row(), ['fact_id'])
Speed up loading, don't update for now. import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
if sl.find_one(engine, cube_table, name=cube.name):
return
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
sl.delete(engine, fact_table)
for i, fact in enumerate(cube.facts):
sl.add_row(engine, fact_table, fact.to_row())
if i and i % 1000 == 0:
log.info("Loaded: %s rows", i)
|
<commit_before>import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
for fact in cube.facts:
sl.upsert(engine, fact_table,
fact.to_row(), ['fact_id'])
<commit_msg>Speed up loading, don't update for now. <commit_after>import logging
import sqlaload as sl
from regenesis.core import app, engine
log = logging.getLogger(__name__)
def load_cube(cube):
cube_table = sl.get_table(engine, 'cube')
if sl.find_one(engine, cube_table, name=cube.name):
return
sl.upsert(engine, cube_table, cube.to_row(), ['name'])
statistic_table = sl.get_table(engine, 'statistic')
sl.upsert(engine, statistic_table, cube.metadata.get('statistic'), ['name'])
dimension_table = sl.get_table(engine, 'dimension')
value_table = sl.get_table(engine, 'value')
for dimension in cube.dimensions.values():
sl.upsert(engine, dimension_table,
dimension.to_row(), ['name'])
for value in dimension.values:
sl.upsert(engine, value_table,
value.to_row(), ['value_id'])
reference_table = sl.get_table(engine, 'reference')
for reference in cube.references:
sl.upsert(engine, reference_table,
reference.to_row(), ['cube_name', 'dimension_name'])
fact_table = sl.get_table(engine, 'fact_' + cube.name)
sl.delete(engine, fact_table)
for i, fact in enumerate(cube.facts):
sl.add_row(engine, fact_table, fact.to_row())
if i and i % 1000 == 0:
log.info("Loaded: %s rows", i)
|
9c848315eba6580249d1f9fc5b598a08ec818fed
|
tests/test_functions.py
|
tests/test_functions.py
|
"""This module tests the TimeFunction class"""
import pytest
import pandas as pd
from tssim.functions import TimeFunction
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(TimeFunction(func).generate(ts))
|
"""This module tests the TimeFunction class"""
import pandas as pd
import pytest
import tssim
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
|
Update reference in TimeFunction test.
|
Update reference in TimeFunction test.
|
Python
|
mit
|
mansenfranzen/tssim
|
"""This module tests the TimeFunction class"""
import pytest
import pandas as pd
from tssim.functions import TimeFunction
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(TimeFunction(func).generate(ts))
Update reference in TimeFunction test.
|
"""This module tests the TimeFunction class"""
import pandas as pd
import pytest
import tssim
@pytest.fixture
def ts():
    """Return a 10-point daily pandas Series for use as test data.

    The index starts at 2017-04-12 with daily frequency and the values
    are simply 0..9.
    """
    periods = 10
    index = pd.date_range("2017-04-12", periods=periods)
    return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
    """A TimeFunction without a condition must apply its function to the
    whole series, i.e. match a direct vectorized call of the function."""
    func = lambda x: x * 2
    assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
|
<commit_before>"""This module tests the TimeFunction class"""
import pytest
import pandas as pd
from tssim.functions import TimeFunction
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(TimeFunction(func).generate(ts))
<commit_msg>Update reference in TimeFunction test.<commit_after>
|
"""This module tests the TimeFunction class"""
import pandas as pd
import pytest
import tssim
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
|
"""This module tests the TimeFunction class"""
import pytest
import pandas as pd
from tssim.functions import TimeFunction
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(TimeFunction(func).generate(ts))
Update reference in TimeFunction test."""This module tests the TimeFunction class"""
import pandas as pd
import pytest
import tssim
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
|
<commit_before>"""This module tests the TimeFunction class"""
import pytest
import pandas as pd
from tssim.functions import TimeFunction
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(TimeFunction(func).generate(ts))
<commit_msg>Update reference in TimeFunction test.<commit_after>"""This module tests the TimeFunction class"""
import pandas as pd
import pytest
import tssim
@pytest.fixture
def ts():
"""Setup test data.
"""
periods = 10
index = pd.date_range("2017-04-12", periods=periods)
return pd.Series(range(periods), index)
def test_vectorized_no_condition(ts):
func = lambda x: x * 2
assert func(ts).equals(tssim.TimeFunction(func).generate(ts))
|
1f5d4fed6d8ad9493c50cbacb2bf8e116de46ff8
|
thinc/extra/load_nlp.py
|
thinc/extra/load_nlp.py
|
import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
|
import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
|
Fix divide by zero error in vectors loading
|
Fix divide by zero error in vectors loading
|
Python
|
mit
|
explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc
|
import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
Fix divide by zero error in vectors loading
|
import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
|
<commit_before>import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
<commit_msg>Fix divide by zero error in vectors loading<commit_after>
|
import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
    """Return the spaCy pipeline for *lang*, loading and caching it on
    first use.  Extra keyword arguments are forwarded to ``spacy.load``.
    """
    global SPACY_MODELS
    import spacy
    try:
        # Fast path: model already cached for this language.
        return SPACY_MODELS[lang]
    except KeyError:
        model = spacy.load(lang, **kwargs)
        SPACY_MODELS[lang] = model
        return model
def get_vectors(ops, lang):
    """Return a (nV, nM) float32 array of unit-normalised word vectors
    for *lang*, cached per (device, language) pair.

    :param ops: backend ops object providing ``device`` and ``asarray``
    :param lang: spaCy language/model name passed to ``get_spacy``
    """
    global VECTORS
    key = (ops.device, lang)
    if key not in VECTORS:
        nlp = get_spacy(lang)
        # Rows are indexed by lexeme rank, so size for the largest rank.
        nV = max(lex.rank for lex in nlp.vocab)+1
        nM = nlp.vocab.vectors_length
        vectors = numpy.zeros((nV, nM), dtype='float32')
        for lex in nlp.vocab:
            if lex.has_vector:
                # Normalise to (near) unit length; the 1e-8 epsilon
                # guards against division by zero for zero-norm vectors.
                vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
        VECTORS[key] = ops.asarray(vectors)
    return VECTORS[key]
|
import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
Fix divide by zero error in vectors loadingimport numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
|
<commit_before>import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / lex.vector_norm
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
<commit_msg>Fix divide by zero error in vectors loading<commit_after>import numpy
SPACY_MODELS = {}
VECTORS = {}
def get_spacy(lang, **kwargs):
global SPACY_MODELS
import spacy
if lang not in SPACY_MODELS:
SPACY_MODELS[lang] = spacy.load(lang, **kwargs)
return SPACY_MODELS[lang]
def get_vectors(ops, lang):
global VECTORS
key = (ops.device, lang)
if key not in VECTORS:
nlp = get_spacy(lang)
nV = max(lex.rank for lex in nlp.vocab)+1
nM = nlp.vocab.vectors_length
vectors = numpy.zeros((nV, nM), dtype='float32')
for lex in nlp.vocab:
if lex.has_vector:
vectors[lex.rank] = lex.vector / (lex.vector_norm+1e-8)
VECTORS[key] = ops.asarray(vectors)
return VECTORS[key]
|
5b2f835f377481c6c217dd886f28c1bb400db553
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -q -text -file'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -file @ -q -text'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
|
Update cmd to allow args
|
Update cmd to allow args
Change the cmd string so that the "args" argument can be used in linter settings. The way it was any args would be inserted between the '-file' and the filename which broke the '-file' argument.
For this config,
"cflint": {
"@disable": false,
"args": ['-configfile c:\cflintrc.xml'],
"excludes": []
}
The results are:
old: cflint -q -text -file -configfile c:\cflintrc.xml index.cfm
new: cflint -file index.cfm -q -text -configfile c:\cflintrc.xml
|
Python
|
mit
|
ckaznocha/SublimeLinter-contrib-CFLint
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -q -text -file'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
Update cmd to allow args
Change the cmd string so that the "args" argument can be used in linter settings. The way it was any args would be inserted between the '-file' and the filename which broke the '-file' argument.
For this config,
"cflint": {
"@disable": false,
"args": ['-configfile c:\cflintrc.xml'],
"excludes": []
}
The results are:
old: cflint -q -text -file -configfile c:\cflintrc.xml index.cfm
new: cflint -file index.cfm -q -text -configfile c:\cflintrc.xml
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -file @ -q -text'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -q -text -file'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
<commit_msg>Update cmd to allow args
Change the cmd string so that the "args" argument can be used in linter settings. The way it was any args would be inserted between the '-file' and the filename which broke the '-file' argument.
For this config,
"cflint": {
"@disable": false,
"args": ['-configfile c:\cflintrc.xml'],
"excludes": []
}
The results are:
old: cflint -q -text -file -configfile c:\cflintrc.xml index.cfm
new: cflint -file index.cfm -q -text -configfile c:\cflintrc.xml<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
    """Provides an interface to CFLint."""

    # Sublime Text syntaxes this linter applies to.
    syntax = ('coldfusioncfc', 'html+cfml')
    # NOTE(review): '@' appears to be SublimeLinter's file-name
    # placeholder; putting '-file @' first lets user-configured "args"
    # be appended at the end without breaking the '-file' option --
    # confirm against SublimeLinter cmd docs.
    cmd = 'cflint -file @ -q -text'
    version_args = '-version'
    version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
    version_requirement = '>= 0.1.8'
    # CFLint's text output spans six lines per finding; parsed as one
    # multiline, case-insensitive, verbose-mode match.
    regex = r'''(?xi)
        # The severity
        ^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
        # The file name
        ^.*$\r?\n
        # The Message Code
        ^.*$\r?\n
        # The Column number
        ^\s*Column:(?P<col>\d+)\s*$\r?\n
        # The Line number
        ^\s*Line:(?P<line>\d+)\s*$\r?\n
        # The Error Message
        ^\s*Message:(?P<message>.+)$\r?\n
    '''
    multiline = True
    # CFLint reports findings on stdout, not stderr.
    error_stream = util.STREAM_STDOUT
    word_re = r'^<?(#?[-\w]+)'
    # NOTE(review): '-' presumably tells SublimeLinter to lint the file
    # on disk rather than a temp copy -- confirm against the docs.
    tempfile_suffix = '-'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -q -text -file'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
Update cmd to allow args
Change the cmd string so that the "args" argument can be used in linter settings. The way it was any args would be inserted between the '-file' and the filename which broke the '-file' argument.
For this config,
"cflint": {
"@disable": false,
"args": ['-configfile c:\cflintrc.xml'],
"excludes": []
}
The results are:
old: cflint -q -text -file -configfile c:\cflintrc.xml index.cfm
new: cflint -file index.cfm -q -text -configfile c:\cflintrc.xml#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -file @ -q -text'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -q -text -file'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
<commit_msg>Update cmd to allow args
Change the cmd string so that the "args" argument can be used in linter settings. The way it was any args would be inserted between the '-file' and the filename which broke the '-file' argument.
For this config,
"cflint": {
"@disable": false,
"args": ['-configfile c:\cflintrc.xml'],
"excludes": []
}
The results are:
old: cflint -q -text -file -configfile c:\cflintrc.xml index.cfm
new: cflint -file index.cfm -q -text -configfile c:\cflintrc.xml<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by ckaznocha
# Copyright (c) 2014 ckaznocha
#
# License: MIT
#
"""This module exports the CFLint plugin class."""
from SublimeLinter.lint import Linter, util
class CFLint(Linter):
"""Provides an interface to CFLint."""
syntax = ('coldfusioncfc', 'html+cfml')
cmd = 'cflint -file @ -q -text'
version_args = '-version'
version_re = r'\b(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.1.8'
regex = r'''(?xi)
# The severity
^\s*Severity:(?:(?P<warning>(INFO|WARNING))|(?P<error>ERROR))\s*$\r?\n
# The file name
^.*$\r?\n
# The Message Code
^.*$\r?\n
# The Column number
^\s*Column:(?P<col>\d+)\s*$\r?\n
# The Line number
^\s*Line:(?P<line>\d+)\s*$\r?\n
# The Error Message
^\s*Message:(?P<message>.+)$\r?\n
'''
multiline = True
error_stream = util.STREAM_STDOUT
word_re = r'^<?(#?[-\w]+)'
tempfile_suffix = '-'
|
e3780b2751aac7a1a0c261b4b058aaff855b8e8b
|
docido_sdk/toolbox/contextlib_ext.py
|
docido_sdk/toolbox/contextlib_ext.py
|
from contextlib import contextmanager
import copy
@contextmanager
def restore(obj, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param obj: object to backup
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(obj)
try:
yield obj
finally:
obj = backup
|
from contextlib import contextmanager
import copy
@contextmanager
def restore_dict_kv(a_dict, key, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param a_dict:
associative table
:param: key
key whose value has to be backed up
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(a_dict[key])
try:
yield
finally:
a_dict[key] = backup
|
Apply `restore' utility to dictionary only
|
Apply `restore' utility to dictionary only
|
Python
|
apache-2.0
|
cogniteev/docido-python-sdk,LilliJane/docido-python-sdk
|
from contextlib import contextmanager
import copy
@contextmanager
def restore(obj, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param obj: object to backup
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(obj)
try:
yield obj
finally:
obj = backup
Apply `restore' utility to dictionary only
|
from contextlib import contextmanager
import copy
@contextmanager
def restore_dict_kv(a_dict, key, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param a_dict:
associative table
:param: key
key whose value has to be backed up
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(a_dict[key])
try:
yield
finally:
a_dict[key] = backup
|
<commit_before>
from contextlib import contextmanager
import copy
@contextmanager
def restore(obj, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param obj: object to backup
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(obj)
try:
yield obj
finally:
obj = backup
<commit_msg>Apply `restore' utility to dictionary only<commit_after>
|
from contextlib import contextmanager
import copy
@contextmanager
def restore_dict_kv(a_dict, key, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param a_dict:
associative table
:param: key
key whose value has to be backed up
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(a_dict[key])
try:
yield
finally:
a_dict[key] = backup
|
from contextlib import contextmanager
import copy
@contextmanager
def restore(obj, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param obj: object to backup
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(obj)
try:
yield obj
finally:
obj = backup
Apply `restore' utility to dictionary only
from contextlib import contextmanager
import copy
@contextmanager
def restore_dict_kv(a_dict, key, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param a_dict:
associative table
:param: key
key whose value has to be backed up
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(a_dict[key])
try:
yield
finally:
a_dict[key] = backup
|
<commit_before>
from contextlib import contextmanager
import copy
@contextmanager
def restore(obj, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param obj: object to backup
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(obj)
try:
yield obj
finally:
obj = backup
<commit_msg>Apply `restore' utility to dictionary only<commit_after>
from contextlib import contextmanager
import copy
@contextmanager
def restore_dict_kv(a_dict, key, copy_func=copy.deepcopy):
"""Backup an object in a with context and restore it when leaving
the scope.
:param a_dict:
associative table
:param: key
key whose value has to be backed up
:param copy_func: callbable object used to create an object copy.
default is `copy.deepcopy`
"""
backup = copy_func(a_dict[key])
try:
yield
finally:
a_dict[key] = backup
|
0f5b08e8aa0a0ad1106e217064f8e11da98ebc0d
|
linter.py
|
linter.py
|
from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'tool', 'vet')
regex = r'.+?:(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>.+)'
tempfile_suffix = 'go'
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
|
from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'vet', '${file_path}')
regex = r'(?P<filename>^.+?):(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>(.|\n\t)+)'
tempfile_suffix = '-'
multiline = True
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
def split_match(self, match):
if self.filename.find(match.group('filename')) > 0: # self.filename is an absolute path
error = super().split_match(match)
flat_message = ' '.join(line.strip() for line in error.message.split('\n'))
return error._replace(message=flat_message)
|
Handle multiline go vet messages.
|
Handle multiline go vet messages.
|
Python
|
mit
|
sirreal/SublimeLinter-contrib-govet
|
from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'tool', 'vet')
regex = r'.+?:(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>.+)'
tempfile_suffix = 'go'
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
Handle multiline go vet messages.
|
from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'vet', '${file_path}')
regex = r'(?P<filename>^.+?):(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>(.|\n\t)+)'
tempfile_suffix = '-'
multiline = True
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
def split_match(self, match):
if self.filename.find(match.group('filename')) > 0: # self.filename is an absolute path
error = super().split_match(match)
flat_message = ' '.join(line.strip() for line in error.message.split('\n'))
return error._replace(message=flat_message)
|
<commit_before>from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'tool', 'vet')
regex = r'.+?:(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>.+)'
tempfile_suffix = 'go'
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
<commit_msg>Handle multiline go vet messages.<commit_after>
|
from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'vet', '${file_path}')
regex = r'(?P<filename>^.+?):(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>(.|\n\t)+)'
tempfile_suffix = '-'
multiline = True
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
def split_match(self, match):
if self.filename.find(match.group('filename')) > 0: # self.filename is an absolute path
error = super().split_match(match)
flat_message = ' '.join(line.strip() for line in error.message.split('\n'))
return error._replace(message=flat_message)
|
from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'tool', 'vet')
regex = r'.+?:(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>.+)'
tempfile_suffix = 'go'
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
Handle multiline go vet messages.from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'vet', '${file_path}')
regex = r'(?P<filename>^.+?):(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>(.|\n\t)+)'
tempfile_suffix = '-'
multiline = True
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
def split_match(self, match):
if self.filename.find(match.group('filename')) > 0: # self.filename is an absolute path
error = super().split_match(match)
flat_message = ' '.join(line.strip() for line in error.message.split('\n'))
return error._replace(message=flat_message)
|
<commit_before>from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'tool', 'vet')
regex = r'.+?:(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>.+)'
tempfile_suffix = 'go'
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
<commit_msg>Handle multiline go vet messages.<commit_after>from SublimeLinter.lint import Linter, util
class Govet(Linter):
cmd = ('go', 'vet', '${file_path}')
regex = r'(?P<filename>^.+?):(?P<line>\d+):((?P<col>\d+):)?\s+(?P<message>(.|\n\t)+)'
tempfile_suffix = '-'
multiline = True
error_stream = util.STREAM_STDERR
defaults = {
'selector': 'source.go'
}
def split_match(self, match):
if self.filename.find(match.group('filename')) > 0: # self.filename is an absolute path
error = super().split_match(match)
flat_message = ' '.join(line.strip() for line in error.message.split('\n'))
return error._replace(message=flat_message)
|
8dbd39a1e1d1f17da40d6a032f1b5d5b125fd025
|
IPython/parallel/tests/test_mongodb.py
|
IPython/parallel/tests/test_mongodb.py
|
"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
|
"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DBA_MONGODB_ADMIN_URI' in os.environ:
# On ShiningPanda, we need a username and password to connect. They are
# passed in a mongodb:// URI.
conn_kwargs['host'] = os.environ['DBA_MONGODB_ADMIN_URI']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
|
Use username and password for MongoDB on ShiningPanda.
|
Use username and password for MongoDB on ShiningPanda.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
Use username and password for MongoDB on ShiningPanda.
|
"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DBA_MONGODB_ADMIN_URI' in os.environ:
# On ShiningPanda, we need a username and password to connect. They are
# passed in a mongodb:// URI.
conn_kwargs['host'] = os.environ['DBA_MONGODB_ADMIN_URI']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
|
<commit_before>"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
<commit_msg>Use username and password for MongoDB on ShiningPanda.<commit_after>
|
"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DBA_MONGODB_ADMIN_URI' in os.environ:
# On ShiningPanda, we need a username and password to connect. They are
# passed in a mongodb:// URI.
conn_kwargs['host'] = os.environ['DBA_MONGODB_ADMIN_URI']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
|
"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
Use username and password for MongoDB on ShiningPanda."""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DBA_MONGODB_ADMIN_URI' in os.environ:
# On ShiningPanda, we need a username and password to connect. They are
# passed in a mongodb:// URI.
conn_kwargs['host'] = os.environ['DBA_MONGODB_ADMIN_URI']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
|
<commit_before>"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
<commit_msg>Use username and password for MongoDB on ShiningPanda.<commit_after>"""Tests for mongodb backend
Authors:
* Min RK
"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
from unittest import TestCase
from nose import SkipTest
from pymongo import Connection
from IPython.parallel.controller.mongodb import MongoDB
from . import test_db
conn_kwargs = {}
if 'DB_IP' in os.environ:
conn_kwargs['host'] = os.environ['DB_IP']
if 'DBA_MONGODB_ADMIN_URI' in os.environ:
# On ShiningPanda, we need a username and password to connect. They are
# passed in a mongodb:// URI.
conn_kwargs['host'] = os.environ['DBA_MONGODB_ADMIN_URI']
if 'DB_PORT' in os.environ:
conn_kwargs['port'] = int(os.environ['DB_PORT'])
try:
c = Connection(**conn_kwargs)
except Exception:
c=None
class TestMongoBackend(test_db.TaskDBTest, TestCase):
"""MongoDB backend tests"""
def create_db(self):
try:
return MongoDB(database='iptestdb', _connection=c)
except Exception:
raise SkipTest("Couldn't connect to mongodb")
def teardown(self):
if c is not None:
c.drop_database('iptestdb')
|
cec76801dc870ae3e1f8682e84126ee69a2a25a2
|
spacy/__main__.py
|
spacy/__main__.py
|
# coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
|
# coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.cli import profile
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model,
'profile': profile,
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
|
Add profile command to CLI
|
Add profile command to CLI
|
Python
|
mit
|
spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy
|
# coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
Add profile command to CLI
|
# coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.cli import profile
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model,
'profile': profile,
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
|
<commit_before># coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
<commit_msg>Add profile command to CLI<commit_after>
|
# coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.cli import profile
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model,
'profile': profile,
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
|
# coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
Add profile command to CLI# coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.cli import profile
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model,
'profile': profile,
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
|
<commit_before># coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
<commit_msg>Add profile command to CLI<commit_after># coding: utf8
from __future__ import print_function
# NB! This breaks in plac on Python 2!!
#from __future__ import unicode_literals
if __name__ == '__main__':
import plac
import sys
from spacy.cli import download, link, info, package, train, convert, model
from spacy.cli import profile
from spacy.util import prints
commands = {
'download': download,
'link': link,
'info': info,
'train': train,
'convert': convert,
'package': package,
'model': model,
'profile': profile,
}
if len(sys.argv) == 1:
prints(', '.join(commands), title="Available commands", exits=1)
command = sys.argv.pop(1)
sys.argv[0] = 'spacy %s' % command
if command in commands:
plac.call(commands[command])
else:
prints(
"Available: %s" % ', '.join(commands),
title="Unknown command: %s" % command,
exits=1)
|
76b52c988f6b3a23bf52e8c1c2a8993e6f9112c8
|
nightreads/user_manager/forms.py
|
nightreads/user_manager/forms.py
|
from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.CharField()
def clean_tags(self):
tags = self.cleaned_data['tags'].split(',')
return [t.strip().lower() for t in tags]
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
|
from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from nightreads.posts.models import Tag
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.MultipleChoiceField(choices=[(
t.name, t.name) for t in Tag.objects.all()])
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
|
Use `MultipleChoiceField` for `tags` field
|
Use `MultipleChoiceField` for `tags` field
|
Python
|
mit
|
avinassh/nightreads,avinassh/nightreads
|
from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.CharField()
def clean_tags(self):
tags = self.cleaned_data['tags'].split(',')
return [t.strip().lower() for t in tags]
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
Use `MultipleChoiceField` for `tags` field
|
from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from nightreads.posts.models import Tag
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.MultipleChoiceField(choices=[(
t.name, t.name) for t in Tag.objects.all()])
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
|
<commit_before>from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.CharField()
def clean_tags(self):
tags = self.cleaned_data['tags'].split(',')
return [t.strip().lower() for t in tags]
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
<commit_msg>Use `MultipleChoiceField` for `tags` field<commit_after>
|
from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from nightreads.posts.models import Tag
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.MultipleChoiceField(choices=[(
t.name, t.name) for t in Tag.objects.all()])
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
|
from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.CharField()
def clean_tags(self):
tags = self.cleaned_data['tags'].split(',')
return [t.strip().lower() for t in tags]
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
Use `MultipleChoiceField` for `tags` fieldfrom django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from nightreads.posts.models import Tag
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.MultipleChoiceField(choices=[(
t.name, t.name) for t in Tag.objects.all()])
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
|
<commit_before>from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.CharField()
def clean_tags(self):
tags = self.cleaned_data['tags'].split(',')
return [t.strip().lower() for t in tags]
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
<commit_msg>Use `MultipleChoiceField` for `tags` field<commit_after>from django.contrib.auth.models import User
from django.core.signing import BadSignature, SignatureExpired
from django import forms
from nightreads.posts.models import Tag
from . import user_service
class SubscribeForm(forms.Form):
email = forms.EmailField()
tags = forms.MultipleChoiceField(choices=[(
t.name, t.name) for t in Tag.objects.all()])
class UnsubscribeForm(forms.Form):
email = forms.EmailField()
class ConfirmEmailForm(forms.Form):
user = forms.IntegerField()
subscribe = forms.IntegerField()
code = forms.CharField(max_length=80)
def clean_subscribe(self):
value = bool(self.cleaned_data['subscribe'])
self.cleaned_data['subscribe'] = value
return value
def clean(self):
cleaned_data = super(ConfirmEmailForm, self).clean()
if self.errors:
return cleaned_data
user_id = cleaned_data['user']
code = cleaned_data['code']
for_subscription = cleaned_data['subscribe']
user = User.objects.filter(id=user_id).first()
if not user:
raise forms.ValidationError('Invalid Link')
self.cleaned_data['user'] = user
try:
user_service.validate_key(key=code, user=user,
for_subscription=for_subscription)
except BadSignature:
raise forms.ValidationError('Invalid Link')
except SignatureExpired:
raise forms.ValidationError('Link expired, please regenerate')
return cleaned_data
|
061c83bce03b1ae0261ae345f72f82625f12ff0a
|
ovp_organizations/serializers.py
|
ovp_organizations/serializers.py
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
Return GoogleAddressCityStateSerializer on address field instead of pk in OrganizationSearchSerializer
|
Return GoogleAddressCityStateSerializer on address field instead of pk in OrganizationSearchSerializer
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-organizations,OpenVolunteeringPlatform/django-ovp-organizations
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
Return GoogleAddressCityStateSerializer on address field instead of pk in OrganizationSearchSerializer
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
<commit_before>from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
<commit_msg>Return GoogleAddressCityStateSerializer on address field instead of pk in OrganizationSearchSerializer<commit_after>
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
Return GoogleAddressCityStateSerializer on address field instead of pk in OrganizationSearchSerializerfrom django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
<commit_before>from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
<commit_msg>Return GoogleAddressCityStateSerializer on address field instead of pk in OrganizationSearchSerializer<commit_after>from django.core.exceptions import ValidationError
from ovp_core import validators as core_validators
from ovp_core.serializers import GoogleAddressSerializer, GoogleAddressCityStateSerializer
from ovp_organizations import models
from rest_framework import serializers
from rest_framework import permissions
class OrganizationCreateSerializer(serializers.ModelSerializer):
address = GoogleAddressSerializer(
validators=[core_validators.address_validate]
)
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
def create(self, validated_data):
# Address
address_data = validated_data.pop('address', {})
address_sr = GoogleAddressSerializer(data=address_data)
address = address_sr.create(address_data)
validated_data['address'] = address
# Organization
organization = models.Organization.objects.create(**validated_data)
return organization
#class NonprofitUpdateSerializer(NonprofitCreateSerializer):
# class Meta:
# model = models.Nonprofit
# permission_classes = (permissions.IsAuthenticated,)
# fields = ['name', 'image', 'cover', 'details', 'description', 'websitefacebook_page', 'google_page', 'twitter_handle']
class OrganizationSearchSerializer(serializers.ModelSerializer):
address = GoogleAddressCityStateSerializer()
class Meta:
model = models.Organization
fields = ['id', 'owner', 'name', 'website', 'facebook_page', 'address', 'details', 'description', 'type']
|
9d058688986838459edf9f6ec40fac04867e0c2c
|
knights/compat/django.py
|
knights/compat/django.py
|
import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.helper
def capfirst(value):
return value and value[0].upper() + value[1:]
@register.helper
def safe(value):
return str(value)
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
|
import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
|
Remove bodgy static tag Remove duplicate capfirst helper
|
Remove bodgy static tag
Remove duplicate capfirst helper
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.helper
def capfirst(value):
return value and value[0].upper() + value[1:]
@register.helper
def safe(value):
return str(value)
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
Remove bodgy static tag
Remove duplicate capfirst helper
|
import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
    """Return *filename* prefixed with the configured STATIC_URL setting.

    Falls back to the bare filename when Django settings are unavailable.
    """
    try:
        from django.conf import settings
    except ImportError:
        prefix = ''
    else:
        # Bug fix: the original read ``getattr(settings, name, '')`` but no
        # ``name`` was in scope, raising NameError whenever Django was
        # importable.  The intended lookup is the STATIC_URL setting,
        # mirroring Django's own static template helper.
        prefix = iri_to_uri(getattr(settings, "STATIC_URL", ''))
    return prefix + filename
|
<commit_before>import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.helper
def capfirst(value):
return value and value[0].upper() + value[1:]
@register.helper
def safe(value):
return str(value)
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
<commit_msg>Remove bodgy static tag
Remove duplicate capfirst helper<commit_after>
|
import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
|
import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.helper
def capfirst(value):
return value and value[0].upper() + value[1:]
@register.helper
def safe(value):
return str(value)
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
Remove bodgy static tag
Remove duplicate capfirst helperimport ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
|
<commit_before>import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.tag
def static(parser, token):
src = parser.parse_expression(token)
return ast.Yield(value=ast.BinOp(
left=ast.Str(s='/static/%s'),
op=ast.Mod(),
right=src,
))
@register.helper
def capfirst(value):
return value and value[0].upper() + value[1:]
@register.helper
def safe(value):
return str(value)
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
<commit_msg>Remove bodgy static tag
Remove duplicate capfirst helper<commit_after>import ast
from django.core.urlresolvers import reverse
from django.utils.encoding import iri_to_uri
import datetime
from knights.library import Library
register = Library()
@register.helper
def now(fmt):
return datetime.datetime.now().strftime(fmt)
@register.helper
def url(name, *args, **kwargs):
try:
return reverse(name, args=args, kwargs=kwargs)
except:
return None
@register.helper
def static(filename):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
|
f96d26e8686cb2d1a15860414b90e48418e41f38
|
tests/integration/conftest.py
|
tests/integration/conftest.py
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="module")
def docker(request):
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="function")
def docker(request):
os.system("for c in `docker ps -a -q`;do docker rm $c;done")
os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
Purge images and containers before each test
|
tests: Purge images and containers before each test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk>
|
Python
|
mit
|
XD-embedded/xd-docker,XD-embedded/xd-docker,esben/xd-docker,esben/xd-docker
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="module")
def docker(request):
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
tests: Purge images and containers before each test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk>
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="function")
def docker(request):
os.system("for c in `docker ps -a -q`;do docker rm $c;done")
os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
<commit_before>import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="module")
def docker(request):
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
<commit_msg>tests: Purge images and containers before each test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk><commit_after>
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="function")
def docker(request):
    """Return a DockerClient after purging all containers and images.

    Runs before every test (function scope) so each test starts from a
    clean docker state.
    """
    # Best-effort cleanup via the docker CLI; os.system return codes are
    # ignored.  NOTE(review): assumes a POSIX shell and docker on PATH.
    os.system("for c in `docker ps -a -q`;do docker rm $c;done")
    os.system("for i in `docker images -q`;do docker rmi $i;done")
    return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
    """Capture stdout into an in-memory buffer for later inspection."""

    def __init__(self):
        # Backing buffer that collects everything printed while redirected.
        self.stream = io.StringIO()

    def redirect(self):
        """Return a context manager that routes sys.stdout into the buffer."""
        return contextlib.redirect_stdout(self.stream)

    def get(self):
        """Return everything captured so far as a single string."""
        return self.stream.getvalue()

    def getlines(self):
        """Return captured output as a list of lines (trailing newline dropped)."""
        captured = self.stream.getvalue()
        return captured.rstrip('\n').split('\n')

    def lastline(self):
        """Return the most recently captured line, or None when there are none."""
        lines = self.getlines()
        return lines[-1] if lines else None
@pytest.fixture
def stdout():
    """Provide a fresh StreamRedirector for capturing a test's stdout."""
    return StreamRedirector()
@pytest.fixture
def cleandir(request):
    """Create a throwaway temp directory, chdir into it, and remove it afterwards."""
    tmpdir = tempfile.mkdtemp()
    os.chdir(tmpdir)

    def _teardown():
        shutil.rmtree(tmpdir)

    request.addfinalizer(_teardown)
    return tmpdir
|
import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="module")
def docker(request):
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
tests: Purge images and containers before each test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk>import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="function")
def docker(request):
os.system("for c in `docker ps -a -q`;do docker rm $c;done")
os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
<commit_before>import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="module")
def docker(request):
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
<commit_msg>tests: Purge images and containers before each test
Signed-off-by: Esben Haabendal <da90c138e4a9573086862393cde34fa33d74f6e5@haabendal.dk><commit_after>import pytest
import io
import contextlib
import tempfile
import shutil
import os
from xd.docker.client import *
DOCKER_HOST = os.environ.get('DOCKER_HOST', None)
@pytest.fixture(scope="function")
def docker(request):
os.system("for c in `docker ps -a -q`;do docker rm $c;done")
os.system("for i in `docker images -q`;do docker rmi $i;done")
return DockerClient(host=DOCKER_HOST)
class StreamRedirector(object):
def __init__(self):
self.stream = io.StringIO()
def redirect(self):
return contextlib.redirect_stdout(self.stream)
def get(self):
return self.stream.getvalue()
def getlines(self):
return self.stream.getvalue().rstrip('\n').split('\n')
def lastline(self):
lines = self.getlines()
if not lines:
return None
return lines[-1]
@pytest.fixture
def stdout():
return StreamRedirector()
@pytest.fixture
def cleandir(request):
newdir = tempfile.mkdtemp()
os.chdir(newdir)
def remove_cleandir():
shutil.rmtree(newdir)
request.addfinalizer(remove_cleandir)
return newdir
|
e4e4e8d5c3acf5851d33700f8b55aa2e1f9c33f2
|
server/app/migrations/0003_region.py
|
server/app/migrations/0003_region.py
|
import os
import json
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
#print("{tab}{name}".format(tab="".join([" " * (deep-1)]), name=region.name))
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(os.path.join(os.path.dirname(__file__),
data_file)))
#print("添加省份")
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
|
import os
import json
from collections import OrderedDict
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(
name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(
os.path.join(os.path.dirname(__file__), data_file)),
object_pairs_hook=OrderedDict)
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
|
Make ID of regions be definite.
|
SERVER-242: Make ID of regions be definite.
|
Python
|
mit
|
malaonline/Server,malaonline/iOS,malaonline/Android,malaonline/Android,malaonline/iOS,malaonline/Android,malaonline/Server,malaonline/Server,malaonline/iOS,malaonline/Server
|
import os
import json
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
#print("{tab}{name}".format(tab="".join([" " * (deep-1)]), name=region.name))
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(os.path.join(os.path.dirname(__file__),
data_file)))
#print("添加省份")
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
SERVER-242: Make ID of regions be definite.
|
import os
import json
from collections import OrderedDict
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(
name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(
os.path.join(os.path.dirname(__file__), data_file)),
object_pairs_hook=OrderedDict)
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
|
<commit_before>import os
import json
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
#print("{tab}{name}".format(tab="".join([" " * (deep-1)]), name=region.name))
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(os.path.join(os.path.dirname(__file__),
data_file)))
#print("添加省份")
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
<commit_msg>SERVER-242: Make ID of regions be definite.<commit_after>
|
import os
import json
from collections import OrderedDict
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
    """Depth-first walk of the nested region spec, persisting one Region per name.

    *root* may be a dict (name -> children), a list of sibling names, or a
    single plain name.  Returns the Region created for a plain name, else None.
    """
    Region = apps.get_model('app', 'Region')
    if isinstance(root, dict):
        # A dict entry is a parent whose value holds its children; the
        # parent is a leaf only when the child container is empty.
        for name, children in root.items():
            parent = dfs(apps, name, deep, superset, not children)
            dfs(apps, children, deep + 1, parent)
    elif isinstance(root, list):
        # Siblings at the same depth, all created as leaves.
        for name in root:
            dfs(apps, name, deep, superset, True)
    else:
        record = Region(
            name=root, superset=superset, admin_level=deep, leaf=leaf)
        record.save()
        return record
def add_region(apps, schema_editor):
    """Load the nested region spec from disk and persist it via dfs().

    Uses a small fixture file when settings.TESTING is set, otherwise the
    full data file shipped next to this migration.
    """
    if settings.TESTING:
        data_file = "regions_for_test.json"
    else:
        data_file = "regions.txt"
    # OrderedDict preserves the on-disk key ordering so rows are inserted
    # (and thus assigned primary keys) in a definite, reproducible order.
    regions = json.load(open(
        os.path.join(os.path.dirname(__file__), data_file)),
        object_pairs_hook=OrderedDict)
    dfs(apps, regions, 1)
class Migration(migrations.Migration):
    """Data migration seeding the Region table from the bundled spec."""
    dependencies = [
        ('app', '0002_subject'),
    ]
    operations = [
        migrations.RunPython(add_region),
    ]
|
import os
import json
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
#print("{tab}{name}".format(tab="".join([" " * (deep-1)]), name=region.name))
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(os.path.join(os.path.dirname(__file__),
data_file)))
#print("添加省份")
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
SERVER-242: Make ID of regions be definite.import os
import json
from collections import OrderedDict
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(
name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(
os.path.join(os.path.dirname(__file__), data_file)),
object_pairs_hook=OrderedDict)
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
|
<commit_before>import os
import json
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
#print("{tab}{name}".format(tab="".join([" " * (deep-1)]), name=region.name))
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(os.path.join(os.path.dirname(__file__),
data_file)))
#print("添加省份")
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
<commit_msg>SERVER-242: Make ID of regions be definite.<commit_after>import os
import json
from collections import OrderedDict
from django.db import migrations
from django.conf import settings
def dfs(apps, root, deep, superset=None, leaf=True):
Region = apps.get_model('app', 'Region')
if isinstance(root, dict):
for k, v in root.items():
s = dfs(apps, k, deep, superset, not v)
dfs(apps, v, deep + 1, s)
elif isinstance(root, list):
for k in root:
dfs(apps, k, deep, superset, True)
else:
region = Region(
name=root, superset=superset, admin_level=deep, leaf=leaf)
region.save()
return region
def add_region(apps, schema_editor):
if settings.TESTING:
data_file = "regions_for_test.json"
else:
data_file = "regions.txt"
regions = json.load(open(
os.path.join(os.path.dirname(__file__), data_file)),
object_pairs_hook=OrderedDict)
dfs(apps, regions, 1)
class Migration(migrations.Migration):
dependencies = [
('app', '0002_subject'),
]
operations = [
migrations.RunPython(add_region),
]
|
e2ecc6968eb4108a3c15d16898e60e0962eba9f8
|
invocations/checks.py
|
invocations/checks.py
|
"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name='blacken', iterable=['folder'])
def blacken(c, line_length=79, folder=None):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get("folders", default_folders)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
if check:
black_command_line = "{0} --check".format(black_command_line)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
Add --check support to the invocations.blacken task
|
Add --check support to the invocations.blacken task
|
Python
|
bsd-2-clause
|
pyinvoke/invocations
|
"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name='blacken', iterable=['folder'])
def blacken(c, line_length=79, folder=None):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get("folders", default_folders)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
Add --check support to the invocations.blacken task
|
"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
if check:
black_command_line = "{0} --check".format(black_command_line)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
<commit_before>"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name='blacken', iterable=['folder'])
def blacken(c, line_length=79, folder=None):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get("folders", default_folders)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
<commit_msg>Add --check support to the invocations.blacken task<commit_after>
|
"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
    """Run black over the configured source folders.

    Folders come from the --folder flag when given, otherwise from the
    ``blacken.folders`` config key, defaulting to the current directory.
    With check=True black only reports violations instead of rewriting.
    """
    fallback = ["."]
    from_config = c.config.get("blacken", {}).get(
        "folders", fallback
    )
    targets = folder if folder else from_config
    black = "black -l {0}".format(line_length)
    if check:
        black = "{0} --check".format(black)
    c.run(
        "find {0} -name '*.py' | xargs {1}".format(" ".join(targets), black),
        pty=True,
    )
|
"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name='blacken', iterable=['folder'])
def blacken(c, line_length=79, folder=None):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get("folders", default_folders)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
Add --check support to the invocations.blacken task"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
if check:
black_command_line = "{0} --check".format(black_command_line)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
<commit_before>"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name='blacken', iterable=['folder'])
def blacken(c, line_length=79, folder=None):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get("folders", default_folders)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
<commit_msg>Add --check support to the invocations.blacken task<commit_after>"""
Shortcuts for common development check tasks
"""
from __future__ import unicode_literals
from invoke import task
@task(name="blacken", iterable=["folder"])
def blacken(c, line_length=79, folder=None, check=False):
"""Run black on the current source"""
default_folders = ["."]
configured_folders = c.config.get("blacken", {}).get(
"folders", default_folders
)
folders = configured_folders if not folder else folder
black_command_line = "black -l {0}".format(line_length)
if check:
black_command_line = "{0} --check".format(black_command_line)
cmd = "find {0} -name '*.py' | xargs {1}".format(
" ".join(folders), black_command_line
)
c.run(cmd, pty=True)
|
dba6f3a456b3d75e1202ccb688581876a93e48f2
|
pwndbg/strings.py
|
pwndbg/strings.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'ignore')
sz = str(sz)
except Exception as e:
return None
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'replace', maxlen)
sz = pwndbg.memory.read(address, len(sz))
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
sz = str(sz)
except Exception as e:
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
|
Fix string extraction. The "ignore" setting was getting us lots of non-string stuff.
|
Fix string extraction. The "ignore" setting was getting us lots of non-string stuff.
|
Python
|
mit
|
anthraxx/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg,chubbymaggie/pwndbg,disconnect3d/pwndbg,zachriggle/pwndbg,cebrusfs/217gdb,anthraxx/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,cebrusfs/217gdb,disconnect3d/pwndbg,anthraxx/pwndbg,0xddaa/pwndbg,pwndbg/pwndbg,chubbymaggie/pwndbg,cebrusfs/217gdb,anthraxx/pwndbg,zachriggle/pwndbg,0xddaa/pwndbg,pwndbg/pwndbg,0xddaa/pwndbg
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'ignore')
sz = str(sz)
except Exception as e:
return None
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
Fix string extraction. The "ignore" setting was getting us lots of non-string stuff.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'replace', maxlen)
sz = pwndbg.memory.read(address, len(sz))
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
sz = str(sz)
except Exception as e:
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'ignore')
sz = str(sz)
except Exception as e:
return None
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
<commit_msg>Fix string extraction. The "ignore" setting was getting us lots of non-string stuff.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'replace', maxlen)
sz = pwndbg.memory.read(address, len(sz))
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
sz = str(sz)
except Exception as e:
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'ignore')
sz = str(sz)
except Exception as e:
return None
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
Fix string extraction. The "ignore" setting was getting us lots of non-string stuff.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'replace', maxlen)
sz = pwndbg.memory.read(address, len(sz))
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
sz = str(sz)
except Exception as e:
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'ignore')
sz = str(sz)
except Exception as e:
return None
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
<commit_msg>Fix string extraction. The "ignore" setting was getting us lots of non-string stuff.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functionality for resolving ASCII printable strings within
the debuggee's address space.
"""
from __future__ import print_function
import string
import gdb
import pwndbg.events
import pwndbg.typeinfo
length = 15
@pwndbg.events.stop
def update_length():
r"""
Unfortunately there's not a better way to get at this info.
>>> gdb.execute('show print elements', from_tty=False, to_string=True)
'Limit on string chars or array elements to print is 21.\n'
"""
global length
message = gdb.execute('show print elements', from_tty=False, to_string=True)
message = message.split()[-1]
message = message.strip('.')
length = int(message)
def get(address, maxlen = None):
if maxlen is None:
maxlen = length
try:
sz = gdb.Value(address)
sz = sz.cast(pwndbg.typeinfo.pchar)
sz = sz.string('ascii', 'replace', maxlen)
sz = pwndbg.memory.read(address, len(sz))
if not all(s in string.printable for s in sz.rstrip('\x00')):
return None
sz = str(sz)
except Exception as e:
return None
if len(sz) < maxlen:
return sz
return sz[:maxlen] + '...'
|
cd827059f9c500603d5c6b1d1bdf1621dc87a6a2
|
pyaem/handlers.py
|
pyaem/handlers.py
|
from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected error'
raise exception.PyAemException(code, message)
|
from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected response http code {0} and body\n{1}'.format(response['http_code'], response['body'])
raise exception.PyAemException(code, message)
|
Update unexpected handler message, with http code and body.
|
Update unexpected handler message, with http code and body.
|
Python
|
mit
|
wildone/pyaem,Sensis/pyaem
|
from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected error'
raise exception.PyAemException(code, message)Update unexpected handler message, with http code and body.
|
from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected response http code {0} and body\n{1}'.format(response['http_code'], response['body'])
raise exception.PyAemException(code, message)
|
<commit_before>from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected error'
raise exception.PyAemException(code, message)<commit_msg>Update unexpected handler message, with http code and body.<commit_after>
|
from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected response http code {0} and body\n{1}'.format(response['http_code'], response['body'])
raise exception.PyAemException(code, message)
|
from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected error'
raise exception.PyAemException(code, message)Update unexpected handler message, with http code and body.from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected response http code {0} and body\n{1}'.format(response['http_code'], response['body'])
raise exception.PyAemException(code, message)
|
<commit_before>from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected error'
raise exception.PyAemException(code, message)<commit_msg>Update unexpected handler message, with http code and body.<commit_after>from BeautifulSoup import *
import exception
def auth_fail(response, **kwargs):
code = response['http_code']
message = 'Authentication failed - incorrect username and/or password'
raise exception.PyAemException(code, message)
def method_not_allowed(response, **kwargs):
code = response['http_code']
soup = BeautifulSoup(response['body'])
message = soup.p.string
raise exception.PyAemException(code, message)
def unexpected(response, **kwargs):
code = response['http_code']
message = 'Unexpected response http code {0} and body\n{1}'.format(response['http_code'], response['body'])
raise exception.PyAemException(code, message)
|
eabe9c25d73a2644b8697f0e9304e61dee5be198
|
src/smdba/roller.py
|
src/smdba/roller.py
|
# coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self):
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message = None
def run(self):
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: str = None):
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
|
# coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
import typing
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self) -> None:
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message: typing.Optional[str] = None
def run(self) -> None:
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: typing.Optional[str] = None) -> None:
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
|
Add annotations to the methods
|
Add annotations to the methods
|
Python
|
mit
|
SUSE/smdba,SUSE/smdba
|
# coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self):
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message = None
def run(self):
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: str = None):
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
Add annotations to the methods
|
# coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
import typing
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self) -> None:
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message: typing.Optional[str] = None
def run(self) -> None:
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: typing.Optional[str] = None) -> None:
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
|
<commit_before># coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self):
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message = None
def run(self):
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: str = None):
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
<commit_msg>Add annotations to the methods<commit_after>
|
# coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
import typing
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self) -> None:
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message: typing.Optional[str] = None
def run(self) -> None:
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: typing.Optional[str] = None) -> None:
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
|
# coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self):
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message = None
def run(self):
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: str = None):
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
Add annotations to the methods# coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
import typing
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self) -> None:
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message: typing.Optional[str] = None
def run(self) -> None:
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: typing.Optional[str] = None) -> None:
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
|
<commit_before># coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self):
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message = None
def run(self):
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: str = None):
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
<commit_msg>Add annotations to the methods<commit_after># coding: utf-8
"""
Visual console "toys".
"""
import time
import sys
import threading
import typing
class Roller(threading.Thread):
"""
Roller of some fun sequences while waiting.
"""
def __init__(self) -> None:
threading.Thread.__init__(self)
self.__sequence = ['-', '\\', '|', '/',]
self.__freq = .1
self.__offset = 0
self.__running = False
self.__message: typing.Optional[str] = None
def run(self) -> None:
"""
Run roller.
:return: None
"""
self.__running = True
while self.__running:
if self.__offset > len(self.__sequence) - 1:
self.__offset = 0
sys.stdout.write("\b" + self.__sequence[self.__offset])
sys.stdout.flush()
time.sleep(self.__freq)
self.__offset += 1
print("\b" + self.__message)
sys.stdout.flush()
def stop(self, message: typing.Optional[str] = None) -> None:
"""
Stop roller.
:param message: Message for the roller.
:return: None
"""
self.__message = message if message else " "
self.__running = False
self.__offset = 0
# if __name__ == '__main__':
# print("Doing thing:\t", end="")
# sys.stdout.flush()
#
# roller = Roller()
# roller.start()
# time.sleep(5)
# roller.stop("finished")
# time.sleep(1)
# print("OK")
|
b6bf01a5c95da0de1e6831a3cf41243e69297854
|
setup.py
|
setup.py
|
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# a bug workaround. http://bugs.python.org/issue15881
try:
import multiprocessing
except ImportError:
pass
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
|
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
|
Remove workaround for issue with older python versions.
|
Remove workaround for issue with older python versions.
|
Python
|
apache-2.0
|
osrg/ryu,osrg/ryu,osrg/ryu,osrg/ryu,osrg/ryu
|
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# a bug workaround. http://bugs.python.org/issue15881
try:
import multiprocessing
except ImportError:
pass
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
Remove workaround for issue with older python versions.
|
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
|
<commit_before># Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# a bug workaround. http://bugs.python.org/issue15881
try:
import multiprocessing
except ImportError:
pass
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
<commit_msg>Remove workaround for issue with older python versions.<commit_after>
|
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
|
# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# a bug workaround. http://bugs.python.org/issue15881
try:
import multiprocessing
except ImportError:
pass
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
Remove workaround for issue with older python versions.# Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
|
<commit_before># Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# a bug workaround. http://bugs.python.org/issue15881
try:
import multiprocessing
except ImportError:
pass
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
<commit_msg>Remove workaround for issue with older python versions.<commit_after># Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import ryu.hooks
ryu.hooks.save_orig()
setuptools.setup(name='ryu',
setup_requires=['pbr'],
pbr=True)
|
f87e5d37609b075abd2c7adb0a8b97294b205cb2
|
src/info_retrieval/info_retrieval.py
|
src/info_retrieval/info_retrieval.py
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
Join word tokens into space-delimited string in InfoRetriever
|
Join word tokens into space-delimited string in InfoRetriever
|
Python
|
mit
|
amkahn/question-answering,amkahn/question-answering
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
Join word tokens into space-delimited string in InfoRetriever
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
<commit_before># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
<commit_msg>Join word tokens into space-delimited string in InfoRetriever<commit_after>
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
Join word tokens into space-delimited string in InfoRetriever# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
<commit_before># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
<commit_msg>Join word tokens into space-delimited string in InfoRetriever<commit_after># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
query = " ".join(query)
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
36c7fab4939bbf15c3023883aafdf5f302600018
|
usingnamespace/management/traversal/__init__.py
|
usingnamespace/management/traversal/__init__.py
|
class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
|
class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
next_ctx = None
if key == 'api':
next_ctx = API()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class API(object):
"""Management allows access to API tickets"""
__name__ = 'api'
__parent__ = None
def __init__(self):
"""Create the API object"""
pass
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
|
Add API traversal to the management app
|
Add API traversal to the management app
|
Python
|
isc
|
usingnamespace/usingnamespace
|
class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
Add API traversal to the management app
|
class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
next_ctx = None
if key == 'api':
next_ctx = API()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class API(object):
"""Management allows access to API tickets"""
__name__ = 'api'
__parent__ = None
def __init__(self):
"""Create the API object"""
pass
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
|
<commit_before>class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
<commit_msg>Add API traversal to the management app<commit_after>
|
class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
next_ctx = None
if key == 'api':
next_ctx = API()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class API(object):
"""Management allows access to API tickets"""
__name__ = 'api'
__parent__ = None
def __init__(self):
"""Create the API object"""
pass
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
|
class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
Add API traversal to the management appclass Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
next_ctx = None
if key == 'api':
next_ctx = API()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class API(object):
"""Management allows access to API tickets"""
__name__ = 'api'
__parent__ = None
def __init__(self):
"""Create the API object"""
pass
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
|
<commit_before>class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
<commit_msg>Add API traversal to the management app<commit_after>class Root(object):
"""ManagementRoot
The main root object for any management traversal
"""
__name__ = None
__parent__ = None
def __init__(self, request):
"""Create the default root object
:request: The Pyramid request object
"""
self._request = request
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
next_ctx = None
if key == 'api':
next_ctx = API()
if next_ctx is None:
raise KeyError
next_ctx.__parent__ = self
return next_ctx
class API(object):
"""Management allows access to API tickets"""
__name__ = 'api'
__parent__ = None
def __init__(self):
"""Create the API object"""
pass
def __getitem__(self, key):
"""Check to see if we can traverse this ..."""
raise KeyError
|
b21fa7c5b4aeb7421149730df3e1a5e3bec97ed3
|
dodo_commands/dodo_upgrade.py
|
dodo_commands/dodo_upgrade.py
|
import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip["install", "--upgrade", "dodo_commands"]
|
import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip("install", "--upgrade", "dodo_commands")
|
Fix dodo-upgrade (nothing was executed)
|
Fix dodo-upgrade (nothing was executed)
|
Python
|
mit
|
mnieber/dodo_commands
|
import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip["install", "--upgrade", "dodo_commands"]
Fix dodo-upgrade (nothing was executed)
|
import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip("install", "--upgrade", "dodo_commands")
|
<commit_before>import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip["install", "--upgrade", "dodo_commands"]
<commit_msg>Fix dodo-upgrade (nothing was executed)<commit_after>
|
import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip("install", "--upgrade", "dodo_commands")
|
import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip["install", "--upgrade", "dodo_commands"]
Fix dodo-upgrade (nothing was executed)import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip("install", "--upgrade", "dodo_commands")
|
<commit_before>import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip["install", "--upgrade", "dodo_commands"]
<commit_msg>Fix dodo-upgrade (nothing was executed)<commit_after>import os
import sys
from plumbum import local
def main(): # noqa
pip = local[os.path.join(os.path.dirname(sys.executable), "pip")]
pip("install", "--upgrade", "dodo_commands")
|
3ffb34257acdd58ec8929bf7ec7d5bd2567be334
|
nvchecker_source/git.py
|
nvchecker_source/git.py
|
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = list(map(lambda line: line.split("refs/tags/")[1], data.split("\n")))
return versions
|
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = [line.split("refs/tags/")[1] for line in data.splitlines()]
return versions
|
Use list comprehension instead of map lambda
|
Use list comprehension instead of map lambda
|
Python
|
mit
|
lilydjwg/nvchecker
|
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = list(map(lambda line: line.split("refs/tags/")[1], data.split("\n")))
return versions
Use list comprehension instead of map lambda
|
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = [line.split("refs/tags/")[1] for line in data.splitlines()]
return versions
|
<commit_before># MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = list(map(lambda line: line.split("refs/tags/")[1], data.split("\n")))
return versions
<commit_msg>Use list comprehension instead of map lambda<commit_after>
|
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = [line.split("refs/tags/")[1] for line in data.splitlines()]
return versions
|
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = list(map(lambda line: line.split("refs/tags/")[1], data.split("\n")))
return versions
Use list comprehension instead of map lambda# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = [line.split("refs/tags/")[1] for line in data.splitlines()]
return versions
|
<commit_before># MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = list(map(lambda line: line.split("refs/tags/")[1], data.split("\n")))
return versions
<commit_msg>Use list comprehension instead of map lambda<commit_after># MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
from .cmd import run_cmd # type: ignore
async def get_version(
name, conf, *, cache, keymanager=None
):
git = conf['git']
cmd = f"git ls-remote -t --refs {git}"
data = await cache.get(cmd, run_cmd)
versions = [line.split("refs/tags/")[1] for line in data.splitlines()]
return versions
|
d2456f280fd1d1bff44475b870bf067d2694fc9d
|
chainerrl/functions/arctanh.py
|
chainerrl/functions/arctanh.py
|
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
|
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
if hasattr(type_check, '_argname'):
# typecheck._argname is introduced by Chainer v6
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
|
Fix chainer v4 error about type_check._argname
|
Fix chainer v4 error about type_check._argname
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
Fix chainer v4 error about type_check._argname
|
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
if hasattr(type_check, '_argname'):
# typecheck._argname is introduced by Chainer v6
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
|
<commit_before>from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
<commit_msg>Fix chainer v4 error about type_check._argname<commit_after>
|
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
if hasattr(type_check, '_argname'):
# typecheck._argname is introduced by Chainer v6
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
|
from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
Fix chainer v4 error about type_check._argnamefrom chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
if hasattr(type_check, '_argname'):
# typecheck._argname is introduced by Chainer v6
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
|
<commit_before>from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
<commit_msg>Fix chainer v4 error about type_check._argname<commit_after>from chainer.backends import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
class Arctanh(function_node.FunctionNode):
"""Elementwise inverse hyperbolic tangent function."""
def check_type_forward(self, in_types):
if hasattr(type_check, '_argname'):
# typecheck._argname is introduced by Chainer v6
type_check._argname(in_types, ('x',))
x_type, = in_types
type_check.expect(x_type.dtype.kind == 'f')
def forward(self, inputs):
self.retain_inputs((0,))
x, = inputs
xp = cuda.get_array_module(x)
y = xp.arctanh(x)
return utils.force_array(y, dtype=x.dtype),
def backward(self, indexes, grad_outputs):
x, = self.get_retained_inputs()
gy, = grad_outputs
gx = 1. / (1 - x ** 2) * gy
return gx,
def arctanh(x):
"""Elementwise inverse hyperbolic tangent function.
Args:
x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Arctanh().apply((x,))[0]
|
3c264c4ddf3e21c3b0e495d663e78dc3c80ce949
|
python/saliweb/test/MySQLdb/cursors.py
|
python/saliweb/test/MySQLdb/cursors.py
|
import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
|
import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory,
'contact_email': 'test@test.com'}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
|
Add support for completed-job email to mocks
|
Add support for completed-job email to mocks
|
Python
|
lgpl-2.1
|
salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb
|
import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
Add support for completed-job email to mocks
|
import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory,
'contact_email': 'test@test.com'}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
|
<commit_before>import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
<commit_msg>Add support for completed-job email to mocks<commit_after>
|
import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory,
'contact_email': 'test@test.com'}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
|
import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
Add support for completed-job email to mocksimport datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory,
'contact_email': 'test@test.com'}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
|
<commit_before>import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
<commit_msg>Add support for completed-job email to mocks<commit_after>import datetime
class DictCursor(object):
def __init__(self, conn):
self.conn = conn
def execute(self, sql, args=()):
self.sql, self.args = sql, args
def fetchone(self):
if self.sql == 'SELECT * FROM jobs WHERE name=%s AND passwd=%s':
# Check completed jobs
for j in self.conn._jobs:
if self.args == (j.name, j.passwd):
return {'state': 'COMPLETED', 'name': j.name,
'passwd': j.passwd,
'archive_time': datetime.datetime(year=2099,
month=1, day=1),
'directory': j.directory,
'contact_email': 'test@test.com'}
# Check incoming jobs
for j in self.conn._incoming_jobs:
if self.args == (j['name'], j['passwd']):
return {'state': 'INCOMING', 'name': j['name'],
'contact_email': j['email'],
'submit_time': datetime.datetime(year=2000,
month=1, day=1)}
def __iter__(self):
return iter([])
|
0c56e276aa1963ec35d744f61cecbb9368f115be
|
admin_tools/theming/templatetags/theming_tags.py
|
admin_tools/theming/templatetags/theming_tags.py
|
"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
"""
css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False)
if css:
css = '/'.join([get_media_url(), css])
else:
css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css'])
return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css
register.simple_tag(render_theming_css)
|
"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
If ADMIN_TOOLS_THEMING_CSS is explicitely defined to None, don't render
anything.
"""
rval = ''
try:
css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS')
except AttributeError:
css_path = 'admin_tools/css/theming.css'
if css_path is not None:
css_url = '%s/%s' % (get_media_url(), css_path)
rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url
return rval
register.simple_tag(render_theming_css)
|
Enable not loading theming CSS by explicitely setting ADMIN_TOOLS_THEMING_CSS to None
|
Enable not loading theming CSS by explicitely setting ADMIN_TOOLS_THEMING_CSS to None
|
Python
|
mit
|
liberation/django-admin-tools,liberation/django-admin-tools,liberation/django-admin-tools,liberation/django-admin-tools
|
"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
"""
css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False)
if css:
css = '/'.join([get_media_url(), css])
else:
css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css'])
return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css
register.simple_tag(render_theming_css)
Enable not loading theming CSS by explicitely setting ADMIN_TOOLS_THEMING_CSS to None
|
"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
If ADMIN_TOOLS_THEMING_CSS is explicitely defined to None, don't render
anything.
"""
rval = ''
try:
css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS')
except AttributeError:
css_path = 'admin_tools/css/theming.css'
if css_path is not None:
css_url = '%s/%s' % (get_media_url(), css_path)
rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url
return rval
register.simple_tag(render_theming_css)
|
<commit_before>"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
"""
css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False)
if css:
css = '/'.join([get_media_url(), css])
else:
css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css'])
return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css
register.simple_tag(render_theming_css)
<commit_msg>Enable not loading theming CSS by explicitely setting ADMIN_TOOLS_THEMING_CSS to None<commit_after>
|
"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
If ADMIN_TOOLS_THEMING_CSS is explicitely defined to None, don't render
anything.
"""
rval = ''
try:
css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS')
except AttributeError:
css_path = 'admin_tools/css/theming.css'
if css_path is not None:
css_url = '%s/%s' % (get_media_url(), css_path)
rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url
return rval
register.simple_tag(render_theming_css)
|
"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
"""
css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False)
if css:
css = '/'.join([get_media_url(), css])
else:
css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css'])
return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css
register.simple_tag(render_theming_css)
Enable not loading theming CSS by explicitely setting ADMIN_TOOLS_THEMING_CSS to None"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
If ADMIN_TOOLS_THEMING_CSS is explicitely defined to None, don't render
anything.
"""
rval = ''
try:
css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS')
except AttributeError:
css_path = 'admin_tools/css/theming.css'
if css_path is not None:
css_url = '%s/%s' % (get_media_url(), css_path)
rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url
return rval
register.simple_tag(render_theming_css)
|
<commit_before>"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
"""
css = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS', False)
if css:
css = '/'.join([get_media_url(), css])
else:
css = '/'.join([get_media_url(), 'admin_tools', 'css', 'theming.css'])
return '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css
register.simple_tag(render_theming_css)
<commit_msg>Enable not loading theming CSS by explicitely setting ADMIN_TOOLS_THEMING_CSS to None<commit_after>"""
Theming template tags.
To load the theming tags just do: ``{% load theming_tags %}``.
"""
from django import template
from django.conf import settings
from admin_tools.utils import get_media_url
register = template.Library()
def render_theming_css():
"""
Template tag that renders the needed css files for the theming app.
If ADMIN_TOOLS_THEMING_CSS is explicitely defined to None, don't render
anything.
"""
rval = ''
try:
css_path = getattr(settings, 'ADMIN_TOOLS_THEMING_CSS')
except AttributeError:
css_path = 'admin_tools/css/theming.css'
if css_path is not None:
css_url = '%s/%s' % (get_media_url(), css_path)
rval = '<link rel="stylesheet" type="text/css" media="screen" href="%s" />' % css_url
return rval
register.simple_tag(render_theming_css)
|
21e961d7b52b5e34607f899748ef7be9aa3cd1be
|
evalset/multiopt_test_funcs.py
|
evalset/multiopt_test_funcs.py
|
from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
|
from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians01(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians01, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
|
Add 01 to function name
|
Add 01 to function name
|
Python
|
mit
|
sigopt/evalset
|
from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
Add 01 to function name
|
from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians01(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians01, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
|
<commit_before>from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
<commit_msg>Add 01 to function name<commit_after>
|
from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians01(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians01, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
|
from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
Add 01 to function namefrom evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians01(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians01, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
|
<commit_before>from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
<commit_msg>Add 01 to function name<commit_after>from evalset.test_funcs import TestFunction, lzip
import numpy
class MultioptTestFunction(TestFunction):
def __init__(self, dim):
super(MultioptTestFunction, self).__init__(dim)
self.local_minima_loc = [] # Sorted in increasing order of function value at the local minima
class LowDMixtureOfGaussians01(MultioptTestFunction):
def __init__(self, dim=2):
assert dim == 2
super(LowDMixtureOfGaussians01, self).__init__(dim)
self.bounds = lzip([-1] * self.dim, [1] * self.dim)
self.fmin = -0.502124885135
self.fmax = 0
self.local_minima_loc = [(-0.2, -0.5), (0.8, 0.3)]
def do_evaluate(self, x):
x1, x2 = x
return -(
.5 * numpy.exp(-10 * (.8 * (x1 + .2) ** 2 + .7 * (x2 + .5) ** 2)) +
.5 * numpy.exp(-8 * (.3 * (x1 - .8) ** 2 + .6 * (x2 - .3) ** 2))
)
|
5785323d0a83c1f8b3b4e1cd17a22ff5222114fe
|
mistraldashboard/test/tests/error_handle.py
|
mistraldashboard/test/tests/error_handle.py
|
# Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.handle_errors import handle_errors
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
def test_args_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(request):
raise self.CommonException()
self.assertRaises(self.CommonException, common_view, {})
def test_kwargs_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(slf, request, context=None):
raise self.CommonException()
with self.assertRaises(self.CommonException):
common_view(slf=None, request={})
|
# Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
|
Remove the test cases for handle_errors to fix the py27 gate issue
|
Remove the test cases for handle_errors to fix the py27 gate issue
As we just change the exceptions handle method in horizon, now the
test cases have some issues, so disable them first to fix all py27
gate fails.
Change-Id: Ic369434a40ff209b06de9481884637d46ee588f7
|
Python
|
apache-2.0
|
openstack/mistral-dashboard,openstack/mistral-dashboard,openstack/mistral-dashboard
|
# Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.handle_errors import handle_errors
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
def test_args_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(request):
raise self.CommonException()
self.assertRaises(self.CommonException, common_view, {})
def test_kwargs_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(slf, request, context=None):
raise self.CommonException()
with self.assertRaises(self.CommonException):
common_view(slf=None, request={})
Remove the test cases for handle_errors to fix the py27 gate issue
As we just change the exceptions handle method in horizon, now the
test cases have some issues, so disable them first to fix all py27
gate fails.
Change-Id: Ic369434a40ff209b06de9481884637d46ee588f7
|
# Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
|
<commit_before># Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.handle_errors import handle_errors
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
def test_args_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(request):
raise self.CommonException()
self.assertRaises(self.CommonException, common_view, {})
def test_kwargs_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(slf, request, context=None):
raise self.CommonException()
with self.assertRaises(self.CommonException):
common_view(slf=None, request={})
<commit_msg>Remove the test cases for handle_errors to fix the py27 gate issue
As we just change the exceptions handle method in horizon, now the
test cases have some issues, so disable them first to fix all py27
gate fails.
Change-Id: Ic369434a40ff209b06de9481884637d46ee588f7<commit_after>
|
# Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
|
# Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.handle_errors import handle_errors
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
def test_args_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(request):
raise self.CommonException()
self.assertRaises(self.CommonException, common_view, {})
def test_kwargs_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(slf, request, context=None):
raise self.CommonException()
with self.assertRaises(self.CommonException):
common_view(slf=None, request={})
Remove the test cases for handle_errors to fix the py27 gate issue
As we just change the exceptions handle method in horizon, now the
test cases have some issues, so disable them first to fix all py27
gate fails.
Change-Id: Ic369434a40ff209b06de9481884637d46ee588f7# Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
|
<commit_before># Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.handle_errors import handle_errors
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
def test_args_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(request):
raise self.CommonException()
self.assertRaises(self.CommonException, common_view, {})
def test_kwargs_request_view_error_handle(self):
@handle_errors('Error message')
def common_view(slf, request, context=None):
raise self.CommonException()
with self.assertRaises(self.CommonException):
common_view(slf=None, request={})
<commit_msg>Remove the test cases for handle_errors to fix the py27 gate issue
As we just change the exceptions handle method in horizon, now the
test cases have some issues, so disable them first to fix all py27
gate fails.
Change-Id: Ic369434a40ff209b06de9481884637d46ee588f7<commit_after># Copyright 2015 ASD Technologies Co.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mistraldashboard.test import helpers as test
class ErrorHandleTests(test.TestCase):
class CommonException(Exception):
pass
|
1454ae817862fc446ad5948cbefe2825ceb46fc8
|
queue.py
|
queue.py
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
try:
return self.front.value
except AttributeError:
raise ValueError('No items in queue')
|
Add peek function to Queue class
|
Add peek function to Queue class
|
Python
|
mit
|
jwarren116/data-structures-deux
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
Add peek function to Queue class
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
try:
return self.front.value
except AttributeError:
raise ValueError('No items in queue')
|
<commit_before>#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
<commit_msg>Add peek function to Queue class<commit_after>
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
try:
return self.front.value
except AttributeError:
raise ValueError('No items in queue')
|
#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
Add peek function to Queue class#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
try:
return self.front.value
except AttributeError:
raise ValueError('No items in queue')
|
<commit_before>#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
pass
<commit_msg>Add peek function to Queue class<commit_after>#!/usr/bin/env python
'''Implementation of a simple queue data structure.
The queue has `enqueue`, `dequeue`, and `peek` methods.
Items in the queue have `value` and `behind` attributes.
The queue has a `front` attribute.
'''
class Item(object):
def __init__(self, value, behind=None):
self.value = value
self.behind = behind
def __str__(self):
return self.value
class Queue(object):
def __init__(self, front=None):
self.front = front
def enqueue(self, value):
pass
def dequeue(self):
pass
def peek(self):
try:
return self.front.value
except AttributeError:
raise ValueError('No items in queue')
|
dc4a16a663e718e07815d810313d36fcc6039878
|
sequere/backends/redis/query.py
|
sequere/backends/redis/query.py
|
from collections import OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1])) for i, value in enumerate(scores.items())]
|
try:
from collections import OrderedDict
except ImportError:
from django.utils.datastructures import SortedDict as OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1]))
for i, value in enumerate(scores.items())]
|
Fix compat for python 2.6
|
Fix compat for python 2.6
|
Python
|
mit
|
thoas/django-sequere
|
from collections import OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1])) for i, value in enumerate(scores.items())]
Fix compat for python 2.6
|
try:
from collections import OrderedDict
except ImportError:
from django.utils.datastructures import SortedDict as OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1]))
for i, value in enumerate(scores.items())]
|
<commit_before>from collections import OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1])) for i, value in enumerate(scores.items())]
<commit_msg>Fix compat for python 2.6<commit_after>
|
try:
from collections import OrderedDict
except ImportError:
from django.utils.datastructures import SortedDict as OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1]))
for i, value in enumerate(scores.items())]
|
from collections import OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1])) for i, value in enumerate(scores.items())]
Fix compat for python 2.6try:
from collections import OrderedDict
except ImportError:
from django.utils.datastructures import SortedDict as OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1]))
for i, value in enumerate(scores.items())]
|
<commit_before>from collections import OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1])) for i, value in enumerate(scores.items())]
<commit_msg>Fix compat for python 2.6<commit_after>try:
from collections import OrderedDict
except ImportError:
from django.utils.datastructures import SortedDict as OrderedDict
from sequere.query import QuerySetTransformer
from sequere import utils
class RedisQuerySetTransformer(QuerySetTransformer):
def __init__(self, client, count, key, prefix, manager):
super(RedisQuerySetTransformer, self).__init__(client, count)
self.keys = [key, ]
self.order_by(False)
self.prefix = prefix
self.manager = manager
def order_by(self, desc):
self.desc = desc
if desc:
self.method = getattr(self.qs, 'zrevrangebyscore')
self.pieces = self.keys + ['+inf', '-inf']
else:
self.method = getattr(self.qs, 'zrangebyscore')
self.pieces = self.keys + ['-inf', '+inf']
return self
def transform(self, qs):
scores = self.method(*self.pieces,
start=self.start,
num=self.stop - self.start,
withscores=True)
scores = OrderedDict(scores)
objects = self.manager.get_from_uid_list(scores.keys())
return [(objects[i], utils.from_timestamp(value[1]))
for i, value in enumerate(scores.items())]
|
c324a640893a4a6b36bb8edfe0515fad55d1df2d
|
efm2riot/patches.py
|
efm2riot/patches.py
|
EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa
def add_extern_c(source_file, source):
"""
Add 'Extern C' to a given source_file.
"""
# Don't add it if file already contains it.
if EXTERN_FIND1 in source:
return source
# Dirty hack by looking for a string, but it works.
offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
part_one = source[:offset]
part_two = source[offset:]
return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
|
EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa
def add_extern_c(source_file, source):
"""
Add 'Extern C' to a given source_file.
"""
# Patches only work with newline versions of the file.
if "\r\n" in source:
raise Exception(
"You need to convert all Gecko SDK sources to Linux file endings "
"first (use something like dos2unix).")
# Don't add it if file already contains it.
if EXTERN_FIND1 in source:
return source
# Dirty hack by looking for a string, but it works.
offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
part_one = source[:offset]
part_two = source[offset:]
return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
|
Make sure that Linux line endings are used.
|
Make sure that Linux line endings are used.
|
Python
|
mit
|
basilfx/EFM2Riot,basilfx/EFM2Riot,basilfx/EFM2Riot,basilfx/EFM2Riot,basilfx/EFM2Riot
|
EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa
def add_extern_c(source_file, source):
"""
Add 'Extern C' to a given source_file.
"""
# Don't add it if file already contains it.
if EXTERN_FIND1 in source:
return source
# Dirty hack by looking for a string, but it works.
offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
part_one = source[:offset]
part_two = source[offset:]
return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
Make sure that Linux line endings are used.
|
EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa
def add_extern_c(source_file, source):
"""
Add 'Extern C' to a given source_file.
"""
# Patches only work with newline versions of the file.
if "\r\n" in source:
raise Exception(
"You need to convert all Gecko SDK sources to Linux file endings "
"first (use something like dos2unix).")
# Don't add it if file already contains it.
if EXTERN_FIND1 in source:
return source
# Dirty hack by looking for a string, but it works.
offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
part_one = source[:offset]
part_two = source[offset:]
return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
|
<commit_before>EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa
def add_extern_c(source_file, source):
"""
Add 'Extern C' to a given source_file.
"""
# Don't add it if file already contains it.
if EXTERN_FIND1 in source:
return source
# Dirty hack by looking for a string, but it works.
offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
part_one = source[:offset]
part_two = source[offset:]
return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
<commit_msg>Make sure that Linux line endings are used.<commit_after>
|
# Text fragments used to splice an `extern "C"` guard into a C header.
EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa


def add_extern_c(source_file, source):
    """Insert a C++ `extern "C"` guard into *source* and return the result.

    The opening guard is spliced in right after the file's banner comment
    (EXTERN_FIND2); the closing guard is appended at the end.  Sources that
    already carry a guard are returned unchanged.

    Raises an Exception when *source* still contains Windows (CRLF) line
    endings, because the substring patches only match LF files.
    """
    if "\r\n" in source:
        raise Exception(
            "You need to convert all Gecko SDK sources to Linux file endings "
            "first (use something like dos2unix).")
    if EXTERN_FIND1 in source:
        # Already guarded -- nothing to do.
        return source
    # Splice point: directly after the banner comment's closing line.
    insert_at = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
    head, tail = source[:insert_at], source[insert_at:]
    return head + EXTERN_START + tail + EXTERN_STOP
def fix_arm_math(source_file, source):
    """Guard the ARM_MATH_CM0PLUS define so it can coexist with RIOT-OS.

    RIOT-OS's Cortex headers already define ARM_MATH_CM0PLUS; replacing the
    SDK's unconditional #define with a guarded one avoids a redefinition.
    """
    old_define = "#define ARM_MATH_CM0PLUS"
    new_define = (
        "#ifndef ARM_MATH_CM0PLUS\n"
        "#define ARM_MATH_CM0PLUS\n"
        "#endif"
    )
    return source.replace(old_define, new_define)
|
# Text fragments used to splice an `extern "C"` guard into a C header.
EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa


def add_extern_c(source_file, source):
    """
    Add 'Extern C' to a given source_file.

    The guard is inserted right after the banner comment (EXTERN_FIND2) and
    closed at the end of the file; already-guarded sources are returned
    unchanged.
    """
    # Patches only work with newline (LF) versions of the file: the substring
    # matches below would silently fail on CRLF input, so reject it up front.
    if "\r\n" in source:
        raise Exception(
            "You need to convert all Gecko SDK sources to Linux file endings "
            "first (use something like dos2unix).")
    # Don't add it if file already contains a guard.
    if EXTERN_FIND1 in source:
        return source
    # Dirty hack by looking for a string, but it works: splice directly after
    # the banner comment's closing line.
    offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
    part_one = source[:offset]
    part_two = source[offset:]
    return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
# `source_file` is unused here; the signature mirrors the other patch
# helpers in this file (e.g. add_extern_c).
# Wrap the SDK's unconditional #define in an #ifndef guard so it can
# coexist with RIOT-OS's own ARM_MATH_CM0PLUS definition.
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
Make sure that Linux line endings are used.EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa
def add_extern_c(source_file, source):
"""
Add 'Extern C' to a given source_file.
"""
# Patches only work with newline versions of the file.
if "\r\n" in source:
raise Exception(
"You need to convert all Gecko SDK sources to Linux file endings "
"first (use something like dos2unix).")
# Don't add it if file already contains it.
if EXTERN_FIND1 in source:
return source
# Dirty hack by looking for a string, but it works.
offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
part_one = source[:offset]
part_two = source[offset:]
return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
|
<commit_before>EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa
def add_extern_c(source_file, source):
"""
Add 'Extern C' to a given source_file.
"""
# Don't add it if file already contains it.
if EXTERN_FIND1 in source:
return source
# Dirty hack by looking for a string, but it works.
offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
part_one = source[:offset]
part_two = source[offset:]
return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
<commit_msg>Make sure that Linux line endings are used.<commit_after>EXTERN_START = "\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n"
EXTERN_STOP = "#ifdef __cplusplus\n}\n#endif\n\n"
EXTERN_FIND1 = "extern \"C\" {\n"
EXTERN_FIND2 = " *****************************************************************************/\n" # noqa
def add_extern_c(source_file, source):
"""
Add 'Extern C' to a given source_file.
"""
# Patches only work with newline versions of the file.
if "\r\n" in source:
raise Exception(
"You need to convert all Gecko SDK sources to Linux file endings "
"first (use something like dos2unix).")
# Don't add it if file already contains it.
if EXTERN_FIND1 in source:
return source
# Dirty hack by looking for a string, but it works.
offset = source.index(EXTERN_FIND2) + len(EXTERN_FIND2)
part_one = source[:offset]
part_two = source[offset:]
return part_one + EXTERN_START + part_two + EXTERN_STOP
def fix_arm_math(source_file, source):
"""
Add conditional for ARM_MATH_CM definition. It is already defined by the
Cortex definitions of RIOT-OS.
"""
return source.replace(
"#define ARM_MATH_CM0PLUS",
"#ifndef ARM_MATH_CM0PLUS\n#define ARM_MATH_CM0PLUS\n#endif")
|
e07c1659f5c8dcad473b43f492a3418083e8fdad
|
setup.py
|
setup.py
|
# pylint: disable=missing-docstring
# Packaging script for the mltils library (Machine Learning utilities).
from setuptools import setup
from setuptools import find_packages
# NOTE(review): version is hard-coded here rather than read from the
# package -- keep it in sync with any __version__ attribute.
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
# Runtime dependencies; unpinned, so the latest releases are installed.
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost'
],
packages=find_packages())
|
# pylint: disable=missing-docstring
# Packaging script for the mltils library (Machine Learning utilities).
from setuptools import setup
from setuptools import find_packages
# NOTE(review): version is hard-coded here rather than read from the
# package -- keep it in sync with any __version__ attribute.
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
# Runtime dependencies; unpinned, so the latest releases are installed.
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost', 'statsmodels'
],
packages=find_packages())
|
Add statsmodels as a dependency
|
Add statsmodels as a dependency
|
Python
|
mit
|
rladeira/mltils
|
# pylint: disable=missing-docstring
from setuptools import setup
from setuptools import find_packages
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost'
],
packages=find_packages())
Add statsmodels as a dependency
|
# pylint: disable=missing-docstring
from setuptools import setup
from setuptools import find_packages
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost', 'statsmodels'
],
packages=find_packages())
|
<commit_before># pylint: disable=missing-docstring
from setuptools import setup
from setuptools import find_packages
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost'
],
packages=find_packages())
<commit_msg>Add statsmodels as a dependency<commit_after>
|
# pylint: disable=missing-docstring
from setuptools import setup
from setuptools import find_packages
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost', 'statsmodels'
],
packages=find_packages())
|
# pylint: disable=missing-docstring
from setuptools import setup
from setuptools import find_packages
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost'
],
packages=find_packages())
Add statsmodels as a dependency# pylint: disable=missing-docstring
from setuptools import setup
from setuptools import find_packages
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost', 'statsmodels'
],
packages=find_packages())
|
<commit_before># pylint: disable=missing-docstring
from setuptools import setup
from setuptools import find_packages
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost'
],
packages=find_packages())
<commit_msg>Add statsmodels as a dependency<commit_after># pylint: disable=missing-docstring
from setuptools import setup
from setuptools import find_packages
setup(name='mltils',
version='0.1',
description='A package with utility functions for Machine Learning',
author='Rafael Ladeira',
author_email='rwladeira@gmail.com',
license='MIT',
install_requires=[
'tqdm', 'numpy', 'scipy', 'scikit-learn', 'pandas',
'xgboost', 'statsmodels'
],
packages=find_packages())
|
e727b390732687565a0a21127e78c6d36e8a8b84
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
license='BSD',
)
|
#!/usr/bin/env python
# Packaging script for blanc-basic-news, a Django news app by Blanc Ltd.
from setuptools import find_packages, setup
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
# The PyPI long description is taken verbatim from the README, so this
# script must run from the project root where README.rst lives.
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
# Trove classifiers advertise the supported Python versions on PyPI.
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
license='BSD',
)
|
Add Python 3.4 to supported list
|
Add Python 3.4 to supported list
|
Python
|
bsd-3-clause
|
blancltd/blanc-basic-news
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
license='BSD',
)
Add Python 3.4 to supported list
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
license='BSD',
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
license='BSD',
)
<commit_msg>Add Python 3.4 to supported list<commit_after>
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
license='BSD',
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
license='BSD',
)
Add Python 3.4 to supported list#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
license='BSD',
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
license='BSD',
)
<commit_msg>Add Python 3.4 to supported list<commit_after>#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='blanc-basic-news',
version='0.3',
description='Blanc Basic News for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/blanc-basic-news',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'blanc-basic-assets>=0.3',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
license='BSD',
)
|
1abed516121f879e7497b665d59866d7a59ccc4a
|
setup.py
|
setup.py
|
import distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
# Packaging script for rabbitpy, a pythonic RabbitMQ (AMQP) client library.
import distutils.core
import sys
from rabbitpy import __version__
# unittest2 backports the Python 2.7 unittest API for 2.6 test runs.
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
# NOTE(review): long_description and license read README.md / LICENSE at
# build time, so this script must run from the project root.
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
Change the development status to Beta
|
Change the development status to Beta
|
Python
|
bsd-3-clause
|
gmr/rabbitpy,gmr/rabbitpy,jonahbull/rabbitpy
|
import distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
Change the development status to Beta
|
import distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
<commit_before>import distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
<commit_msg>Change the development status to Beta<commit_after>
|
import distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
import distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
Change the development status to Betaimport distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
<commit_before>import distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
<commit_msg>Change the development status to Beta<commit_after>import distutils.core
import sys
from rabbitpy import __version__
tests_require = ['nose', 'mock']
if sys.version_info < (2, 7, 0):
tests_require.append('unittest2')
desc = 'A minimalistic & pythonic AMQP library focused on supporting RabbitMQ'
distutils.core.setup(name='rabbitpy',
version=__version__,
description=desc,
long_description=open('README.md').read(),
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://rabbitpy.readthedocs.org',
packages=['rabbitpy'],
package_data={'': ['LICENSE', 'README.md']},
include_package_data=True,
install_requires=['pamqp>=1.2.0'],
tests_require=tests_require,
test_suite='nose.collector',
license=open('LICENSE').read(),
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: Software Development :: Libraries'],
zip_safe=True)
|
0805fd05006d3efba6b6fa52b5921ed01120988b
|
wagtail_pgsearchbackend/migrations/0002_add_gin_index.py
|
wagtail_pgsearchbackend/migrations/0002_add_gin_index.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX IF NOT EXISTS {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
|
Fix migration for Postgres < 9.5
|
Fix migration for Postgres < 9.5
|
Python
|
mit
|
wagtail/wagtail-pg-search-backend
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX IF NOT EXISTS {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
Fix migration for Postgres < 9.5
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX IF NOT EXISTS {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
<commit_msg>Fix migration for Postgres < 9.5<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX IF NOT EXISTS {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
Fix migration for Postgres < 9.5# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX IF NOT EXISTS {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
<commit_msg>Fix migration for Postgres < 9.5<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-03-22 14:53
from __future__ import unicode_literals
from django.db import migrations
from ..models import IndexEntry
table = IndexEntry._meta.db_table
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtail_pgsearchbackend', '0001_initial'),
]
operations = [
migrations.RunSQL(
'CREATE INDEX {0}_body_search ON {0} '
'USING GIN(body_search);'.format(table),
'DROP INDEX IF EXISTS {}_body_search;'.format(table),
),
]
|
991e0f6692a5a07eda8bd1901af6110b5a7dee0b
|
setup.py
|
setup.py
|
import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
)
|
Add more specific Python version classifiers
|
Add more specific Python version classifiers
|
Python
|
apache-2.0
|
abloomston/python-jsonpath-rw,kennknowles/python-jsonpath-rw,brianthelion/python-jsonpath-rw,pkilambi/python-jsonpath-rw,wangjild/python-jsonpath-rw,sileht/python-jsonpath-rw
|
import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
Add more specific Python version classifiers
|
import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
)
|
<commit_before>import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
<commit_msg>Add more specific Python version classifiers<commit_after>
|
import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
)
|
import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
Add more specific Python version classifiersimport setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
)
|
<commit_before>import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
<commit_msg>Add more specific Python version classifiers<commit_after>import setuptools
import sys
import os.path
import subprocess
# Build README.txt from README.md if not present, and if we are actually building for distribution to pypi
if not os.path.exists('README.txt') and 'sdist' in sys.argv:
subprocess.call(['pandoc', '--to=rst', '--smart', '--output=README.txt', 'README.md'])
# But use the best README around
readme = 'README.txt' if os.path.exists('README.txt') else 'README.md'
setuptools.setup(
name='jsonpath-rw',
version='0.9',
description='A robust and significantly extended implementation of JSONPath for Python, with a clear AST for metaprogramming.',
author='Kenneth Knowles',
author_email='kenn.knowles@gmail.com',
url='https://github.com/kennknowles/python-jsonpath-rw',
license='Apache 2.0',
long_description=open(readme).read(),
packages = ['jsonpath_rw'],
test_suite = 'tests',
install_requires = [ 'ply', 'decorator', 'six' ],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
)
|
264e522539f7df0743c1724ad69a83495055d78b
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure>=1.0.0,<1.1.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure-storage>=0.20.0,<0.30.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
|
Switch to only requiring azure-storage
|
Switch to only requiring azure-storage
This switches the requirement to match azure@1.0.3. We no longer
require all of the Azure packages in order to use the storage
adapter. This should not be a breaking change, it only reduces the
scope of the requirements to be as minimal as possible for this
package. There are no functionality changes.
|
Python
|
mit
|
Rediker-Software/django-azure-storage
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure>=1.0.0,<1.1.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
Switch to only requiring azure-storage
This switches the requirement to match azure@1.0.3. We no longer
require all of the Azure packages in order to use the storage
adapter. This should not be a breaking change, it only reduces the
scope of the requirements to be as minimal as possible for this
package. There are no functionality changes.
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure-storage>=0.20.0,<0.30.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure>=1.0.0,<1.1.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
<commit_msg>Switch to only requiring azure-storage
This switches the requirement to match azure@1.0.3. We no longer
require all of the Azure packages in order to use the storage
adapter. This should not be a breaking change, it only reduces the
scope of the requirements to be as minimal as possible for this
package. There are no functionality changes.<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure-storage>=0.20.0,<0.30.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure>=1.0.0,<1.1.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
Switch to only requiring azure-storage
This switches the requirement to match azure@1.0.3. We no longer
require all of the Azure packages in order to use the storage
adapter. This should not be a breaking change, it only reduces the
scope of the requirements to be as minimal as possible for this
package. There are no functionality changes.#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure-storage>=0.20.0,<0.30.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure>=1.0.0,<1.1.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
<commit_msg>Switch to only requiring azure-storage
This switches the requirement to match azure@1.0.3. We no longer
require all of the Azure packages in order to use the storage
adapter. This should not be a breaking change, it only reduces the
scope of the requirements to be as minimal as possible for this
package. There are no functionality changes.<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
import os
from azure_storage import __version__
PACKAGE_DIR = os.path.abspath(os.path.dirname(__file__))
os.chdir(PACKAGE_DIR)
setup(
name='django-azure-storage',
version=__version__,
url="https://github.com/Rediker-Software/django-azure-storage",
author="Kevin Brown",
author_email="kbrown@rediker.com",
description="Django storage backends for Windows Azure blob storage.",
license="MIT",
packages=find_packages(exclude=["tests*", ]),
include_package_data=True,
install_requires=[
'Django>=1.3',
'azure-storage>=0.20.0,<0.30.0',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
|
ce53b83dba2bdbbd04497d4edda402ccc79bed76
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.4",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.5",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
Bump version to fix URL
|
Bump version to fix URL
|
Python
|
mit
|
nanonyme/simplecpreprocessor
|
from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.4",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
Bump version to fix URL
|
from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.5",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
<commit_before>from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.4",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
<commit_msg>Bump version to fix URL<commit_after>
|
from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.5",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.4",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
Bump version to fix URLfrom setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.5",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
<commit_before>from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.4",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
<commit_msg>Bump version to fix URL<commit_after>from setuptools import setup
setup(
name = "simplecpreprocessor",
version = "0.0.5",
author = "Seppo Yli-Olli",
author_email = "seppo.yli-olli@iki.fi",
description = "Simple C preprocessor for usage eg before CFFI",
keywords = "python c preprocessor",
license = "BSD",
url = "https://github.com/nanonyme/simplecpreprocessor",
py_modules=["simplepreprocessor"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
|
63b484e164a24b2f14475ed9a4efbcb025707e03
|
setup.py
|
setup.py
|
from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
|
from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*', 'example', 'docs', 'env']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
|
Exclude common unwanted package patterns
|
Exclude common unwanted package patterns
|
Python
|
mit
|
mariocesar/django-rocket,mariocesar/django-rocket
|
from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
Exclude common unwanted package patterns
|
from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*', 'example', 'docs', 'env']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
|
<commit_before>from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
<commit_msg>Exclude common unwanted package patterns<commit_after>
|
from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*', 'example', 'docs', 'env']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
|
from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
Exclude common unwanted package patternsfrom django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*', 'example', 'docs', 'env']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
|
<commit_before>from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
<commit_msg>Exclude common unwanted package patterns<commit_after>from django_rocket import __version__, __author__, __email__, __license__
from setuptools import setup, find_packages
README = open('README.rst').read()
# Second paragraph has the short description
description = README.split('\n')[1]
setup(
name='django-rocket',
version=__version__,
description=description,
long_description=README,
author=__author__,
author_email=__email__,
license=__license__,
url='https://github.com/mariocesar/django-rocket',
download_url='https://pypi.python.org/pypi/django-rocket',
packages=find_packages(exclude=['tests', 'tests.*', 'example', 'docs', 'env']),
install_requires=[
'django>1.5,<1.7',
'wheel',
],
extras_require={
'Docs': ["sphinx", "sphinx_rtd_theme"],
'develop': ["coverage"],
},
zip_safe=False,
classifiers=[
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django'
]
)
|
96a43f4ef5dae1cfdd8fd356bc3d22e98971dd00
|
setup.py
|
setup.py
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"Faker",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
Use new 'Faker' package for fake-factory
|
Use new 'Faker' package for fake-factory
|
Python
|
mit
|
novafloss/populous
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
Use new 'Faker' package for fake-factory
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"Faker",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
<commit_before>import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
<commit_msg>Use new 'Faker' package for fake-factory<commit_after>
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"Faker",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
Use new 'Faker' package for fake-factoryimport sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"Faker",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
<commit_before>import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
<commit_msg>Use new 'Faker' package for fake-factory<commit_after>import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"Faker",
"dateutils",
"PyYAML",
"peloton_bloomfilters"
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
extras_require={
'tests': ['tox', 'pytest', 'pytest-mock', 'flake8'],
},
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
|
974160117e2f36b12b52df13d4a35726a4ff0907
|
boxsdk/object/api_json_object.py
|
boxsdk/object/api_json_object.py
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
def __iter__(self):
return iter(self._response_object)
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
|
Remove redundant __iter__ from APIJsonObject base class
|
Remove redundant __iter__ from APIJsonObject base class
|
Python
|
apache-2.0
|
box/box-python-sdk
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
def __iter__(self):
return iter(self._response_object)
Remove redundant __iter__ from APIJsonObject base class
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
|
<commit_before># coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
def __iter__(self):
return iter(self._response_object)
<commit_msg>Remove redundant __iter__ from APIJsonObject base class<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
def __iter__(self):
return iter(self._response_object)
Remove redundant __iter__ from APIJsonObject base class# coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
|
<commit_before># coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
def __iter__(self):
return iter(self._response_object)
<commit_msg>Remove redundant __iter__ from APIJsonObject base class<commit_after># coding: utf-8
from __future__ import unicode_literals, absolute_import
from collections import Mapping
from abc import ABCMeta
from .base_api_json_object import BaseAPIJSONObject, BaseAPIJSONObjectMeta
from ..util.compat import with_metaclass
class APIJSONObjectMeta(BaseAPIJSONObjectMeta, ABCMeta):
"""
Avoid conflicting metaclass definitions for APIJSONObject.
http://code.activestate.com/recipes/204197-solving-the-metaclass-conflict/
"""
pass
class APIJSONObject(with_metaclass(APIJSONObjectMeta, BaseAPIJSONObject, Mapping)):
"""Class representing objects that are not part of the REST API."""
def __len__(self):
return len(self._response_object)
|
1ec0f5267119874244474072dfb32f952ae4a8f1
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
|
from distutils.core import setup
from sh import pandoc
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description='\n'.join(pandoc('README.md', t='rst')),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
|
Use pandoc to convert the markdown readme to rst.
|
Use pandoc to convert the markdown readme to rst.
|
Python
|
agpl-3.0
|
cnelsonsic/cardscript
|
from distutils.core import setup
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
Use pandoc to convert the markdown readme to rst.
|
from distutils.core import setup
# NOTE(review): `sh` is a third-party package and `pandoc` must be installed
# on the build machine; both are build-time-only requirements used solely to
# render the long description below.
from sh import pandoc

setup(
    name='cardscript',
    version='0.6',
    description="A scriptable card game processing engine.",
    author="Charles Nelson",
    author_email="cnelsonsic@gmail.com",
    url="https://github.com/cnelsonsic/cardscript",
    packages=['cardscript', 'cardscript.cards'],
    license='AGPLv3+',
    # Convert the Markdown README to the reStructuredText PyPI renders.
    # Iterating the sh command result yields output lines, re-joined here.
    # NOTE(review): sh's line iteration may already include trailing
    # newlines -- verify the '\n'.join does not double-space the output.
    long_description='\n'.join(pandoc('README.md', t='rst')),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
        'Intended Audience :: End Users/Desktop',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Games/Entertainment :: Board Games',
    ],
)
|
<commit_before>from distutils.core import setup
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
<commit_msg>Use pandoc to convert the markdown readme to rst.<commit_after>
|
from distutils.core import setup
from sh import pandoc
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description='\n'.join(pandoc('README.md', t='rst')),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
|
from distutils.core import setup
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
Use pandoc to convert the markdown readme to rst.from distutils.core import setup
from sh import pandoc
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description='\n'.join(pandoc('README.md', t='rst')),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
|
<commit_before>from distutils.core import setup
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
<commit_msg>Use pandoc to convert the markdown readme to rst.<commit_after>from distutils.core import setup
from sh import pandoc
setup(
name='cardscript',
version='0.6',
description="A scriptable card game processing engine.",
author="Charles Nelson",
author_email="cnelsonsic@gmail.com",
url="https://github.com/cnelsonsic/cardscript",
packages=['cardscript', 'cardscript.cards'],
license='AGPLv3+',
long_description='\n'.join(pandoc('README.md', t='rst')),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Games/Entertainment :: Board Games',
],
)
|
9bf70db96d8ae5204b20e1e214cb92e195ab5928
|
changes/api/build_flaky_tests.py
|
changes/api/build_flaky_tests.py
|
from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
|
from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'projectSlug': build.project.slug,
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
|
Add projectSlug to build flaky tests API response
|
Add projectSlug to build flaky tests API response
Summary: We will use it in the test quarantine service to whitelist projects which support quarantine.
Test Plan:
Tested locally.
{
"projectSlug": "changesjenkins",
"repositoryUrl": "https://github.com/dropbox/changes.git",
"flakyTests": {
"count": 1,
"items": [
{
"name": "tests.account.test_account.AccountTest.test_account_change_language"
}
]
}
}
Reviewers: haoyi
Reviewed By: haoyi
Subscribers: changesbot, mkedia
Differential Revision: https://tails.corp.dropbox.com/D123809
|
Python
|
apache-2.0
|
dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,bowlofstew/changes
|
from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
Add projectSlug to build flaky tests API response
Summary: We will use it in the test quarantine service to whitelist projects which support quarantine.
Test Plan:
Tested locally.
{
"projectSlug": "changesjenkins",
"repositoryUrl": "https://github.com/dropbox/changes.git",
"flakyTests": {
"count": 1,
"items": [
{
"name": "tests.account.test_account.AccountTest.test_account_change_language"
}
]
}
}
Reviewers: haoyi
Reviewed By: haoyi
Subscribers: changesbot, mkedia
Differential Revision: https://tails.corp.dropbox.com/D123809
|
from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
    """API endpoint listing a build's flaky tests.

    A test is considered flaky when it ultimately passed but only after
    more than one rerun within this build's jobs.
    """

    def get(self, build_id):
        """Return the flaky tests for the build identified by *build_id*.

        Responds with 404 when the build does not exist; otherwise with a
        JSON payload containing the project slug, repository URL, and the
        flaky test names sorted alphabetically.
        """
        build = Build.query.get(build_id)
        if build is None:
            return '', 404

        jobs = list(Job.query.filter(
            Job.build_id == build.id,
        ))

        flaky_tests_query = db.session.query(
            TestCase.name
        ).filter(
            TestCase.job_id.in_([j.id for j in jobs]),
            TestCase.result == Result.passed,
            TestCase.reruns > 1
        ).order_by(TestCase.name.asc())

        # Materialize as a real list: len() below would fail on Python 3,
        # where map() returns a lazy iterator instead of a list.  The
        # comprehension is also the idiomatic form of map(lambda ...).
        flaky_tests = [{'name': test.name} for test in flaky_tests_query]

        context = {
            'projectSlug': build.project.slug,
            'repositoryUrl': build.project.repository.url,
            'flakyTests': {
                'count': len(flaky_tests),
                'items': flaky_tests
            }
        }

        return self.respond(context)
|
<commit_before>from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
<commit_msg>Add projectSlug to build flaky tests API response
Summary: We will use it in the test quarantine service to whitelist projects which support quarantine.
Test Plan:
Tested locally.
{
"projectSlug": "changesjenkins",
"repositoryUrl": "https://github.com/dropbox/changes.git",
"flakyTests": {
"count": 1,
"items": [
{
"name": "tests.account.test_account.AccountTest.test_account_change_language"
}
]
}
}
Reviewers: haoyi
Reviewed By: haoyi
Subscribers: changesbot, mkedia
Differential Revision: https://tails.corp.dropbox.com/D123809<commit_after>
|
from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'projectSlug': build.project.slug,
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
|
from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
Add projectSlug to build flaky tests API response
Summary: We will use it in the test quarantine service to whitelist projects which support quarantine.
Test Plan:
Tested locally.
{
"projectSlug": "changesjenkins",
"repositoryUrl": "https://github.com/dropbox/changes.git",
"flakyTests": {
"count": 1,
"items": [
{
"name": "tests.account.test_account.AccountTest.test_account_change_language"
}
]
}
}
Reviewers: haoyi
Reviewed By: haoyi
Subscribers: changesbot, mkedia
Differential Revision: https://tails.corp.dropbox.com/D123809from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'projectSlug': build.project.slug,
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
|
<commit_before>from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
<commit_msg>Add projectSlug to build flaky tests API response
Summary: We will use it in the test quarantine service to whitelist projects which support quarantine.
Test Plan:
Tested locally.
{
"projectSlug": "changesjenkins",
"repositoryUrl": "https://github.com/dropbox/changes.git",
"flakyTests": {
"count": 1,
"items": [
{
"name": "tests.account.test_account.AccountTest.test_account_change_language"
}
]
}
}
Reviewers: haoyi
Reviewed By: haoyi
Subscribers: changesbot, mkedia
Differential Revision: https://tails.corp.dropbox.com/D123809<commit_after>from __future__ import absolute_import
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models import Build, Job, TestCase
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
flaky_tests_query = db.session.query(
TestCase.name
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc())
flaky_tests = map(lambda test: {'name': test.name}, flaky_tests_query)
context = {
'projectSlug': build.project.slug,
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
|
51466e1d18180e538cbbb92cc8d3c1a2417be5ff
|
geojsonlint/urls.py
|
geojsonlint/urls.py
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.STATIC_ROOT,
}),
)
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
|
Fix URL that was killing serving static files.
|
Fix URL that was killing serving static files.
|
Python
|
bsd-2-clause
|
JasonSanford/geojsonlint.com,Drooids/geojsonlint.com,JasonSanford/geojsonlint.com,Drooids/geojsonlint.com,JasonSanford/geojsonlint.com,Drooids/geojsonlint.com
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.STATIC_ROOT,
}),
)Fix URL that was killing serving static files.
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.STATIC_ROOT,
}),
)<commit_msg>Fix URL that was killing serving static files.<commit_after>
|
# URL configuration for the geojsonlint project.
from django.conf.urls import patterns, include, url
from django.conf import settings

# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    url(r'^$', 'geojsonlint.views.home', name='home'),
    url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
    # url(r'^geojsonlint/', include('geojsonlint.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)

if settings.DEBUG:
    # Serve static files through Django itself in development only; in
    # production the web server is expected to serve STATIC_ROOT directly.
    urlpatterns += patterns('',
        (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
    )
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.STATIC_ROOT,
}),
)Fix URL that was killing serving static files.from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.STATIC_ROOT,
}),
)<commit_msg>Fix URL that was killing serving static files.<commit_after>from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$', 'geojsonlint.views.home', name='home'),
url(r'^validate$', 'geojsonlint.views.validate', name='validate'),
# url(r'^geojsonlint/', include('geojsonlint.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
|
6d5f5975f913e91554c02b008d3196fa5c6f90cb
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""Build script for cv_algorithms: Python package plus a C++ extension."""
import os
import sys
import io

try:
    import setuptools
except ImportError:
    from distribute_setup import use_setuptools
    use_setuptools()

from setuptools import setup, Extension
from setuptools import find_packages

# MSVC does not accept GCC/Clang-style flags such as -g, -O2 or
# -march=native (it would fail the Windows build), so only pass them on
# non-Windows platforms.
extra_compile_args = [] if os.name == 'nt' else ["-g", "-O2", "-march=native"]
extra_link_args = [] if os.name == 'nt' else ["-g"]

mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
                              sources=['src/thinning.cpp',
                                       'src/distance.cpp',
                                       'src/grassfire.cpp',
                                       'src/popcount.cpp',
                                       'src/neighbours.cpp'],
                              extra_compile_args=extra_compile_args,
                              extra_link_args=extra_link_args)

setup(
    name='cv_algorithms',
    license='Apache license 2.0',
    packages=find_packages(exclude=['tests*']),
    install_requires=['cffi>=0.7'],
    ext_modules=[mod_cv_algorithms],
    test_suite='nose.collector',
    tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
    setup_requires=['nose>=1.0'],
    platforms="any",
    zip_safe=False,
    version='1.0.0',
    long_description=io.open("README.rst", encoding="utf-8").read(),
    description='Optimized OpenCV extra algorithms for Python',
    url="https://github.com/ulikoehler/"
)
|
#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
extra_compile_args = [] if os.name == 'nt' else ["-g", "-O2", "-march=native"]
extra_link_args = [] if os.name == 'nt' else ["-g"]
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args)
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
|
Fix passing GCC compiler options on Windows
|
Fix passing GCC compiler options on Windows
|
Python
|
apache-2.0
|
ulikoehler/cv_algorithms,ulikoehler/cv_algorithms
|
#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=["-g", "-O2", "-march=native"],
extra_link_args=["-g"])
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
Fix passing GCC compiler options on Windows
|
#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
extra_compile_args = [] if os.name == 'nt' else ["-g", "-O2", "-march=native"]
extra_link_args = [] if os.name == 'nt' else ["-g"]
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args)
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
|
<commit_before>#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=["-g", "-O2", "-march=native"],
extra_link_args=["-g"])
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
<commit_msg>Fix passing GCC compiler options on Windows<commit_after>
|
#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
extra_compile_args = [] if os.name == 'nt' else ["-g", "-O2", "-march=native"]
extra_link_args = [] if os.name == 'nt' else ["-g"]
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args)
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
|
#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=["-g", "-O2", "-march=native"],
extra_link_args=["-g"])
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
Fix passing GCC compiler options on Windows#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
extra_compile_args = [] if os.name == 'nt' else ["-g", "-O2", "-march=native"]
extra_link_args = [] if os.name == 'nt' else ["-g"]
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args)
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
|
<commit_before>#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=["-g", "-O2", "-march=native"],
extra_link_args=["-g"])
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
<commit_msg>Fix passing GCC compiler options on Windows<commit_after>#!/usr/bin/env python
import os
import sys
import io
try:
import setuptools
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from setuptools import find_packages
extra_compile_args = [] if os.name == 'nt' else ["-g", "-O2", "-march=native"]
extra_link_args = [] if os.name == 'nt' else ["-g"]
mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms',
sources=['src/thinning.cpp',
'src/distance.cpp',
'src/grassfire.cpp',
'src/popcount.cpp',
'src/neighbours.cpp'],
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args)
setup(
name='cv_algorithms',
license='Apache license 2.0',
packages=find_packages(exclude=['tests*']),
install_requires=['cffi>=0.7'],
ext_modules=[mod_cv_algorithms],
test_suite='nose.collector',
tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'],
setup_requires=['nose>=1.0'],
platforms="any",
zip_safe=False,
version='1.0.0',
long_description=io.open("README.rst", encoding="utf-8").read(),
description='Optimized OpenCV extra algorithms for Python',
url="https://github.com/ulikoehler/"
)
|
ba0805d69e4c5e897383403a9aef1a99861d5a86
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
|
from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas'],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
|
Resolve syntax errror during install
|
Resolve syntax errror during install
|
Python
|
apache-2.0
|
ibm-cds-labs/pixiedust,ibm-cds-labs/pixiedust,ibm-cds-labs/pixiedust,ibm-cds-labs/pixiedust
|
from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
Resolve syntax errror during install
|
from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas'],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
<commit_msg>Resolve syntax errror during install<commit_after>
|
from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas'],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
|
from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
Resolve syntax errror during installfrom setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas'],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
<commit_msg>Resolve syntax errror during install<commit_after>from setuptools import setup, find_packages
setup(name='pixiedust',
version='1.1.18',
description='Productivity library for Jupyter Notebook',
url='https://github.com/pixiedust/pixiedust',
install_requires=['mpld3', 'lxml', 'geojson', 'astunparse', 'markdown', 'colour', 'requests', 'matplotlib', 'pandas'],
author='David Taieb',
author_email='david_taieb@us.ibm.com',
license='Apache 2.0',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'jupyter-pixiedust = install.pixiedustapp:main'
]
}
)
|
ca470bc87dc8aceda0295f79864a47b208bf1a19
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
],
)
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics',
],
)
|
Set classifiers to indicate this project supoorts both Python 2 and 3
|
Set classifiers to indicate this project supoorts both Python 2 and 3
|
Python
|
bsd-3-clause
|
daltonmaag/freetype-py,bitforks/freetype-py
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
],
)
Set classifiers to indicate this project supoorts both Python 2 and 3
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics',
],
)
|
<commit_before>#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
],
)
<commit_msg>Set classifiers to indicate this project supoorts both Python 2 and 3<commit_after>
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics',
],
)
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
],
)
Set classifiers to indicate this project supoorts both Python 2 and 3#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics',
],
)
|
<commit_before>#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
],
)
<commit_msg>Set classifiers to indicate this project supoorts both Python 2 and 3<commit_after>#!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.4.1',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email= 'Nicolas.Rougier@inria.fr',
url = 'https://github.com/rougier/freetype-py',
packages = ['freetype', 'freetype.ft_enums'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Multimedia :: Graphics',
],
)
|
f42d42135ab234bd49337b826dbddce08cbff804
|
setup.py
|
setup.py
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.19.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.20.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
Bump version for relative path fix
|
Bump version for relative path fix
|
Python
|
mit
|
kevinconway/rpmvenv
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.19.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
Bump version for relative path fix
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.20.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
<commit_before>"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.19.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
<commit_msg>Bump version for relative path fix<commit_after>
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.20.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.19.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
Bump version for relative path fix"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.20.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
<commit_before>"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.19.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
<commit_msg>Bump version for relative path fix<commit_after>"""Setuptools configuration for rpmvenv."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='rpmvenv',
version='0.20.0',
url='https://github.com/kevinconway/rpmvenv',
description='RPM packager for Python virtualenv.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='MIT',
packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']),
install_requires=[
'jinja2',
'venvctrl',
'argparse',
'confpy',
'ordereddict',
'semver',
],
entry_points={
'console_scripts': [
'rpmvenv = rpmvenv.cli:main',
],
'rpmvenv.extensions': [
'core = rpmvenv.extensions.core:Extension',
'file_permissions = rpmvenv.extensions.files.permissions:Extension',
'file_extras = rpmvenv.extensions.files.extras:Extension',
'python_venv = rpmvenv.extensions.python.venv:Extension',
'blocks = rpmvenv.extensions.blocks.generic:Extension',
]
},
package_data={
"rpmvenv": ["templates/*"],
},
)
|
9faf2f746eb47ad0d3961ee1d934314c608c3614
|
setup.py
|
setup.py
|
import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation"
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)
|
import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation",
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)
|
Fix dev status trove classifier.
|
Fix dev status trove classifier.
|
Python
|
apache-2.0
|
delapsley/relman
|
import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation"
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)Fix dev status trove classifier.
|
import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation",
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)
|
<commit_before>import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation"
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)<commit_msg>Fix dev status trove classifier.<commit_after>
|
import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation",
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)
|
import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation"
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)Fix dev status trove classifier.import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation",
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)
|
<commit_before>import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation"
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)<commit_msg>Fix dev status trove classifier.<commit_after>import os
import relman
import setuptools
BASEPATH = os.path.abspath(os.path.dirname(__file__))
def load_requirements(filename):
"""load requirements from a pip requirements file."""
lineiter = (line.strip()
for line in open(os.path.join(BASEPATH, filename)))
return [line for line in lineiter if line and not line.startswith("#")]
setuptools.setup(
name="relman",
version=relman.__version__,
description="Release manager",
long_description='Release manager - Automation Toolkit',
classifiers=[
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Documentation",
"Development Status :: 4 - Beta",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python",
"License :: Public Domain",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators"
],
keywords="jira confluence python",
author="David Lapsley, Alex Chan",
author_email="dlapsley@cisco.com, alexc2@cisco.com",
url="http://www.cisco.com",
license="ASL",
packages=setuptools.find_packages(exclude=["tests"]),
include_package_data=True,
data_files=[
('etc', ['etc/sample.cfg']),
],
zip_safe=False,
install_requires=load_requirements("requirements.txt"),
test_suite="tests",
entry_points={
"console_scripts": [
"relman = relman.main:run"
]
}
)
|
a0dd5f40852af01d83451fd213f57a61a4fe0cc5
|
openstack_dashboard/dashboards/project/images/snapshots/urls.py
|
openstack_dashboard/dashboards/project/images/snapshots/urls.py
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create',
views.CreateView.as_view(),
name='create')
)
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create/$',
views.CreateView.as_view(),
name='create')
)
|
Fix too loose url regex for snapshot creation
|
Fix too loose url regex for snapshot creation
Current regex '^(?P<instance_id>[^/]+)/create' for url
projects/images/<instance-id>/create will match all urls start with 'create' by
mistake. The '$' added will make sure the regex only match 'create' as expect.
Change-Id: I9f180d8d904d15e9458513f39b1e4719ac6800a3
Closes-Bug: #1477822
|
Python
|
apache-2.0
|
gerrive/horizon,wolverineav/horizon,noironetworks/horizon,openstack/horizon,wolverineav/horizon,redhat-openstack/horizon,davidcusatis/horizon,noironetworks/horizon,Mirantis/mos-horizon,django-leonardo/horizon,redhat-cip/horizon,bigswitch/horizon,dan1/horizon-x509,redhat-cip/horizon,davidcusatis/horizon,django-leonardo/horizon,Tesora/tesora-horizon,django-leonardo/horizon,BiznetGIO/horizon,NeCTAR-RC/horizon,yeming233/horizon,openstack/horizon,bac/horizon,sandvine/horizon,dan1/horizon-x509,ChameleonCloud/horizon,redhat-openstack/horizon,coreycb/horizon,BiznetGIO/horizon,Mirantis/mos-horizon,wolverineav/horizon,redhat-openstack/horizon,noironetworks/horizon,dan1/horizon-x509,BiznetGIO/horizon,gerrive/horizon,Mirantis/mos-horizon,Tesora/tesora-horizon,gerrive/horizon,ChameleonCloud/horizon,bac/horizon,coreycb/horizon,dan1/horizon-x509,BiznetGIO/horizon,maestro-hybrid-cloud/horizon,redhat-cip/horizon,bigswitch/horizon,redhat-cip/horizon,sandvine/horizon,openstack/horizon,maestro-hybrid-cloud/horizon,django-leonardo/horizon,yeming233/horizon,ChameleonCloud/horizon,coreycb/horizon,Mirantis/mos-horizon,NeCTAR-RC/horizon,wolverineav/horizon,sandvine/horizon,noironetworks/horizon,redhat-openstack/horizon,yeming233/horizon,davidcusatis/horizon,openstack/horizon,bigswitch/horizon,Tesora/tesora-horizon,NeCTAR-RC/horizon,ChameleonCloud/horizon,bac/horizon,gerrive/horizon,NeCTAR-RC/horizon,maestro-hybrid-cloud/horizon,coreycb/horizon,Tesora/tesora-horizon,bac/horizon,davidcusatis/horizon,bigswitch/horizon,yeming233/horizon,sandvine/horizon,maestro-hybrid-cloud/horizon
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create',
views.CreateView.as_view(),
name='create')
)
Fix too loose url regex for snapshot creation
Current regex '^(?P<instance_id>[^/]+)/create' for url
projects/images/<instance-id>/create will match all urls start with 'create' by
mistake. The '$' added will make sure the regex only match 'create' as expect.
Change-Id: I9f180d8d904d15e9458513f39b1e4719ac6800a3
Closes-Bug: #1477822
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create/$',
views.CreateView.as_view(),
name='create')
)
|
<commit_before># Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create',
views.CreateView.as_view(),
name='create')
)
<commit_msg>Fix too loose url regex for snapshot creation
Current regex '^(?P<instance_id>[^/]+)/create' for url
projects/images/<instance-id>/create will match all urls start with 'create' by
mistake. The '$' added will make sure the regex only match 'create' as expect.
Change-Id: I9f180d8d904d15e9458513f39b1e4719ac6800a3
Closes-Bug: #1477822<commit_after>
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create/$',
views.CreateView.as_view(),
name='create')
)
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create',
views.CreateView.as_view(),
name='create')
)
Fix too loose url regex for snapshot creation
Current regex '^(?P<instance_id>[^/]+)/create' for url
projects/images/<instance-id>/create will match all urls start with 'create' by
mistake. The '$' added will make sure the regex only match 'create' as expect.
Change-Id: I9f180d8d904d15e9458513f39b1e4719ac6800a3
Closes-Bug: #1477822# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create/$',
views.CreateView.as_view(),
name='create')
)
|
<commit_before># Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create',
views.CreateView.as_view(),
name='create')
)
<commit_msg>Fix too loose url regex for snapshot creation
Current regex '^(?P<instance_id>[^/]+)/create' for url
projects/images/<instance-id>/create will match all urls start with 'create' by
mistake. The '$' added will make sure the regex only match 'create' as expect.
Change-Id: I9f180d8d904d15e9458513f39b1e4719ac6800a3
Closes-Bug: #1477822<commit_after># Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.images.snapshots import views
urlpatterns = patterns(
'',
url(r'^(?P<instance_id>[^/]+)/create/$',
views.CreateView.as_view(),
name='create')
)
|
dd3ddd6bb7fcfdf9aa230ba53dde85f4cab4cc6d
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 4.1+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
|
from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 3.3+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
|
Update package description to specify OpenGL 3.3+
|
Update package description to specify OpenGL 3.3+
|
Python
|
isc
|
Contraz/demosys-py
|
from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 4.1+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
Update package description to specify OpenGL 3.3+
|
from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 3.3+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
|
<commit_before>from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 4.1+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
<commit_msg>Update package description to specify OpenGL 3.3+<commit_after>
|
from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 3.3+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
|
from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 4.1+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
Update package description to specify OpenGL 3.3+from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 3.3+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
|
<commit_before>from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 4.1+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
<commit_msg>Update package description to specify OpenGL 3.3+<commit_after>from setuptools import setup
setup(
name="demosys-py",
version="0.3.10",
description="Modern OpenGL 3.3+ Framework inspired by Django",
long_description=open('README.rst').read(),
url="https://github.com/Contraz/demosys-py",
author="Einar Forselv",
author_email="eforselv@gmail.com",
maintainer="Einar Forselv",
maintainer_email="eforselv@gmail.com",
packages=['demosys'],
include_package_data=True,
keywords = ['opengl', 'framework', 'demoscene'],
classifiers=[
'Programming Language :: Python',
'Environment :: MacOS X',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
'PyOpenGL==3.1.0',
'glfw==1.4.0',
'pyrr==0.8.2',
'Pillow==4.0.0',
'pyrocket==0.2.5',
# 'pygame==1.9.3',
],
entry_points={'console_scripts': [
'demosys-admin = demosys.core.management:execute_from_command_line',
]},
)
|
6f7354ac31e0ac19a1d2850093a5f1e9901b4fb5
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
url="https://github.com/bitprophet/pytest-relaxed",
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
long_description="\n" + open('README.rst').read(),
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
|
Add URL and long-desc to metadata
|
Add URL and long-desc to metadata
|
Python
|
bsd-2-clause
|
bitprophet/pytest-relaxed
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
Add URL and long-desc to metadata
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
url="https://github.com/bitprophet/pytest-relaxed",
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
long_description="\n" + open('README.rst').read(),
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
<commit_msg>Add URL and long-desc to metadata<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
url="https://github.com/bitprophet/pytest-relaxed",
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
long_description="\n" + open('README.rst').read(),
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
Add URL and long-desc to metadata#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
url="https://github.com/bitprophet/pytest-relaxed",
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
long_description="\n" + open('README.rst').read(),
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
<commit_msg>Add URL and long-desc to metadata<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='pytest-relaxed',
version='0.0.1',
description='Relaxed test discovery/organization for pytest',
license='BSD',
url="https://github.com/bitprophet/pytest-relaxed",
author='Jeff Forcier',
author_email='jeff@bitprophet.org',
long_description="\n" + open('README.rst').read(),
packages=find_packages(),
entry_points={
# TODO: do we need to name the LHS 'pytest_relaxed' too? meh
'pytest11': ['relaxed = pytest_relaxed'],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python',
'Topic :: Software Development :: Testing',
],
)
|
0d3481a2ee4eb5a079a580785124d610d5bcc9e0
|
setup.py
|
setup.py
|
from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Topic :: Software Development',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
|
from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
|
Add trove classifiers for all supported Python versions
|
Add trove classifiers for all supported Python versions
|
Python
|
bsd-2-clause
|
bmihelac/django-import-export,django-import-export/django-import-export,daniell/django-import-export,copperleaftech/django-import-export,django-import-export/django-import-export,jnns/django-import-export,daniell/django-import-export,copperleaftech/django-import-export,bmihelac/django-import-export,copperleaftech/django-import-export,jnns/django-import-export,bmihelac/django-import-export,daniell/django-import-export,jnns/django-import-export,django-import-export/django-import-export,jnns/django-import-export,daniell/django-import-export,bmihelac/django-import-export,copperleaftech/django-import-export,django-import-export/django-import-export
|
from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Topic :: Software Development',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
Add trove classifiers for all supported Python versions
|
from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
|
<commit_before>from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Topic :: Software Development',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
<commit_msg>Add trove classifiers for all supported Python versions<commit_after>
|
from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
|
from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Topic :: Software Development',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
Add trove classifiers for all supported Python versionsfrom distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
|
<commit_before>from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Topic :: Software Development',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
<commit_msg>Add trove classifiers for all supported Python versions<commit_after>from distutils.core import setup
from setuptools import find_packages
VERSION = __import__("import_export").__version__
CLASSIFIERS = [
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development',
]
install_requires = [
'diff-match-patch',
'django>=1.5',
'tablib',
]
setup(
name="django-import-export",
description="Django application and library for importing and exporting"
"data with included admin integration.",
version=VERSION,
author="Informatika Mihelac",
author_email="bmihelac@mihelac.org",
license='BSD License',
platforms=['OS Independent'],
url="https://github.com/django-import-export/django-import-export",
packages=find_packages(exclude=["tests"]),
include_package_data=True,
install_requires=install_requires,
classifiers=CLASSIFIERS,
)
|
2c696d7182bf6f857842e2ae95efa5eaa4fb2594
|
setup.py
|
setup.py
|
from distutils.core import Extension, setup
from Cython.Build import cythonize
extensions = [
Extension('*', ['mathix/*.pyx']),
]
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
ext_modules=cythonize(extensions)
)
|
from distutils.core import Extension, setup
from Cython.Build import cythonize
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=cythonize(extensions)
)
|
Add more classifiers. Check if Cython compilation is available or not.
|
Add more classifiers. Check if Cython compilation is available or not.
|
Python
|
mit
|
PeithVergil/cython-example
|
from distutils.core import Extension, setup
from Cython.Build import cythonize
extensions = [
Extension('*', ['mathix/*.pyx']),
]
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
ext_modules=cythonize(extensions)
)
Add more classifiers. Check if Cython compilation is available or not.
|
from distutils.core import Extension, setup
from Cython.Build import cythonize
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=cythonize(extensions)
)
|
<commit_before>from distutils.core import Extension, setup
from Cython.Build import cythonize
extensions = [
Extension('*', ['mathix/*.pyx']),
]
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
ext_modules=cythonize(extensions)
)
<commit_msg>Add more classifiers. Check if Cython compilation is available or not.<commit_after>
|
from distutils.core import Extension, setup
from Cython.Build import cythonize
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=cythonize(extensions)
)
|
from distutils.core import Extension, setup
from Cython.Build import cythonize
extensions = [
Extension('*', ['mathix/*.pyx']),
]
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
ext_modules=cythonize(extensions)
)
Add more classifiers. Check if Cython compilation is available or not.from distutils.core import Extension, setup
from Cython.Build import cythonize
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=cythonize(extensions)
)
|
<commit_before>from distutils.core import Extension, setup
from Cython.Build import cythonize
extensions = [
Extension('*', ['mathix/*.pyx']),
]
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
ext_modules=cythonize(extensions)
)
<commit_msg>Add more classifiers. Check if Cython compilation is available or not.<commit_after>from distutils.core import Extension, setup
from Cython.Build import cythonize
try:
from Cython.Distutils import build_ext
except ImportError:
use_cython = False
else:
use_cython = True
if use_cython:
extensions = [
Extension('mathix.vector', ['mathix/vector.pyx']),
]
cmdclass = {
'build_ext': build_ext
}
else:
extensions = [
Extension('mathix.vector', ['mathix/vector.c']),
]
cmdclass = {}
setup(
name='mathix',
author='Peith Vergil',
version='0.1',
cmdclass=cmdclass,
packages=[
'mathix',
],
keywords='useless simple math library',
description='A useless simple math library.',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
ext_modules=cythonize(extensions)
)
|
2337deea980b12ea83bd0c05feac2f1b503c5869
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy<1.12",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
|
#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
|
Remove version requirement for numpy
|
Remove version requirement for numpy
|
Python
|
mit
|
dionhaefner/veros,dionhaefner/veros
|
#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy<1.12",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
Remove version requirement for numpy
|
#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy<1.12",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
<commit_msg>Remove version requirement for numpy<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
|
#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy<1.12",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
Remove version requirement for numpy#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy<1.12",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
<commit_msg>Remove version requirement for numpy<commit_after>#!/usr/bin/env python
# coding=utf-8
from setuptools import setup, find_packages
import os
def find_scripts(scriptdir):
"""scrape all available scripts from 'bin' folder"""
return [os.path.join(scriptdir, s) for s in os.listdir(scriptdir) if not s.endswith(".pyc")]
setup(
name = "veros",
version = "0.0.1b0",
packages = find_packages(),
install_requires = [
"numpy",
"scipy",
"netCDF4",
"h5py",
"pillow"
],
author = "Dion Häfner (NBI Copenhagen)",
author_email = "dion.haefner@nbi.ku.dk",
scripts = find_scripts("bin")
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.