Dataset columns and per-column value statistics (string length range or number of distinct values):

| Column | Values |
|---|---|
| commit | string, 40-40 chars |
| old_file | string, 4-118 chars |
| new_file | string, 4-118 chars |
| old_contents | string, 0-2.94k chars |
| new_contents | string, 1-4.43k chars |
| subject | string, 15-444 chars |
| message | string, 16-3.45k chars |
| lang | string, 1 distinct value |
| license | string, 13 distinct values |
| repos | string, 5-43.2k chars |
| prompt | string, 17-4.58k chars |
| response | string, 1-4.43k chars |
| prompt_tagged | string, 58-4.62k chars |
| response_tagged | string, 1-4.43k chars |
| text | string, 132-7.29k chars |
| text_tagged | string, 173-7.33k chars |
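The column list above follows the layout of a Hugging Face dataset preview: each record pairs a commit's before/after file contents with its subject and message, plus derived prompt/response/text fields. Assuming the records below come from such a dataset, a minimal sketch like the following could load and inspect them with the `datasets` library; the dataset path is a placeholder, not the actual source.

```python
# Minimal sketch, assuming a Hugging Face dataset with the columns listed above.
# The dataset path is a placeholder, not the actual source of this dump.
from datasets import load_dataset

ds = load_dataset("your-namespace/commit-message-pairs", split="train")  # hypothetical path

print(ds.column_names)            # commit, old_file, new_file, old_contents, new_contents, ...

row = ds[0]
print(row["commit"])              # 40-character commit SHA
print(row["subject"])             # one-line commit subject
print(row["old_contents"][:200])  # file contents before the commit
print(row["new_contents"][:200])  # file contents after the commit
```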
commit: abcf57ab6d4d15a04c1638f383b4a50a17ea6a3c

old_file:
setup.py

new_file:
setup.py

old_contents:
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)

new_contents:
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)

subject:
Remove meta data around unsupported versions

message:
Remove meta data around unsupported versions

lang:
Python

license:
apache-2.0

repos:
WHenderson/HashDb

prompt:
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)
Remove meta data around unsupported versions

response:
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)

prompt_tagged:
<commit_before>from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)
<commit_msg>Remove meta data around unsupported versions<commit_after>

response_tagged:
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)

text:
from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)
Remove meta data around unsupported versionsfrom setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)

text_tagged:
<commit_before>from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)
<commit_msg>Remove meta data around unsupported versions<commit_after>from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(
name='hashdb2',
version='0.3',
description='HashDb2 provides a simple method for executing commands based on matched files',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Planning0',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Filesystems',
'Topic :: Database',
'Topic :: Utilities'
],
keywords='file matching comparison same identical duplicate duplicates',
url='https://github.com/WHenderson/HashDb',
author='Will Henderson',
author_email='whenderson.github@gmail.com',
license='Apache 2.0',
packages=['hashdb2'],
zip_safe=False,
install_requires=[
'docopt>=0.6.2'
],
entry_points = {
'console_scripts': ['hashdb2=hashdb2.command_line:main'],
}
)
---

commit: 2d5a45ecd7269c59ce7643dd1e4919a84ca16659

old_file:
setup.py

new_file:
setup.py

old_contents:
from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)

new_contents:
from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
'babel',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)

subject:
Add python package install dependency

message:
Add python package install dependency
Add babel as python package install dependency. Babel is used in
phonenumber_field.widget.

lang:
Python

license:
mit

repos:
hwkns/django-phonenumber-field,bramd/django-phonenumber-field,ellmetha/django-phonenumber-field,thenewguy/django-phonenumber-field,hwkns/django-phonenumber-field,bramd/django-phonenumber-field,hovel/django-phonenumber-field,ellmetha/django-phonenumber-field,invalid-access/django-phonenumber-field,stefanfoulis/django-phonenumber-field,thenewguy/django-phonenumber-field,thenewguy/django-phonenumber-field,hovel/django-phonenumber-field,invalid-access/django-phonenumber-field

prompt:
from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
Add python package install dependency
Add babel as python package install dependency. Babel is used in
phonenumber_field.widget.

response:
from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
'babel',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)

prompt_tagged:
<commit_before>from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
<commit_msg>Add python package install dependency
Add babel as python package install dependency. Babel is used in
phonenumber_field.widget.<commit_after>

response_tagged:
from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
'babel',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)

text:
from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
Add python package install dependency
Add babel as python package install dependency. Babel is used in
phonenumber_field.widget.from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
'babel',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)

text_tagged:
<commit_before>from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
<commit_msg>Add python package install dependency
Add babel as python package install dependency. Babel is used in
phonenumber_field.widget.<commit_after>from setuptools import setup, find_packages
from phonenumber_field import __version__
setup(
name="django-phonenumber-field",
version=__version__,
url='http://github.com/stefanfoulis/django-phonenumber-field',
license='BSD',
platforms=['OS Independent'],
description="An international phone number field for django models.",
install_requires=[
'phonenumbers>=7.0.2',
'babel',
],
long_description=open('README.rst').read(),
author='Stefan Foulis',
author_email='stefan.foulis@gmail.com',
maintainer='Stefan Foulis',
maintainer_email='stefan.foulis@gmail.com',
packages=find_packages(),
package_data = {
'phonenumber_field': [
'locale/*/LC_MESSAGES/*',
],
},
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
---

commit: 45b23c04e68e50314bddb063aecf4df63edcbc7a

old_file:
setup.py

new_file:
setup.py

old_contents:
from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)

new_contents:
from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)

subject:
Increment version number in tarball

message:
Increment version number in tarball

lang:
Python

license:
mit

repos:
mpatek/runcalc

prompt:
from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)
Increment version number in tarball

response:
from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)

prompt_tagged:
<commit_before>from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)
<commit_msg>Increment version number in tarball<commit_after>

response_tagged:
from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)

text:
from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)
Increment version number in tarballfrom setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)

text_tagged:
<commit_before>from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)
<commit_msg>Increment version number in tarball<commit_after>from setuptools import setup
setup(
name='runcalc',
version='0.1.1',
description='Running pace calculator',
author='Mike Patek',
author_email='mpatek@gmail.com',
url='https://github.com/mpatek/runcalc',
download_url='https://github.com/mpatek/runcalc/tarball/0.1.1',
packages=['runcalc'],
include_package_data=True,
entry_points={
'console_scripts': [
'runcalc=runcalc.cli:cli'
]
},
install_requires=['click'],
setup_requires=['pytest-runner'],
tests_require=['pytest'],
keywords=['running', 'exercise', 'cli'],
)
---

commit: 6b191243fee14e4703d42dc3fcc38c5c71ccffc9

old_file:
setup.py

new_file:
setup.py

old_contents:
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
shell = tangled.web.scripts.shell
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)

new_contents:
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)

subject:
Remove shell console script from entry points

message:
Remove shell console script from entry points
Amends e09ee16a77a7181fbf8ee18841be6ca37fd32250

lang:
Python

license:
mit

repos:
TangledWeb/tangled.web

prompt:
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
shell = tangled.web.scripts.shell
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
Remove shell console script from entry points
Amends e09ee16a77a7181fbf8ee18841be6ca37fd32250

response:
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)

prompt_tagged:
<commit_before>from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
shell = tangled.web.scripts.shell
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
<commit_msg>Remove shell console script from entry points
Amends e09ee16a77a7181fbf8ee18841be6ca37fd32250<commit_after>

response_tagged:
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)

text:
from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
shell = tangled.web.scripts.shell
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
Remove shell console script from entry points
Amends e09ee16a77a7181fbf8ee18841be6ca37fd32250from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)

text_tagged:
<commit_before>from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
shell = tangled.web.scripts.shell
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
<commit_msg>Remove shell console script from entry points
Amends e09ee16a77a7181fbf8ee18841be6ca37fd32250<commit_after>from setuptools import setup, PEP420PackageFinder
setup(
name='tangled.web',
version='1.0a13.dev0',
description='RESTful Web Framework',
long_description=open('README.rst').read(),
url='https://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.web/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=PEP420PackageFinder.find(include=['tangled*']),
include_package_data=True,
install_requires=[
'tangled>=1.0a12',
'MarkupSafe>=0.23',
'WebOb>=1.5.1',
],
entry_points="""
[tangled.scripts]
serve = tangled.web.scripts.serve
show = tangled.web.scripts.show
[tangled.scaffolds]
basic = tangled.web.scaffolds:basic
""",
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
---

commit: b68fbd9deefddac10328be172af4932e2149280a

old_file:
setup.py

new_file:
setup.py

old_contents:
# -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)

new_contents:
# -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind', 'ipybind.ext'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)

subject:
Add ipybind.ext package to the manifest

message:
Add ipybind.ext package to the manifest

lang:
Python

license:
mit

repos:
aldanor/ipybind,aldanor/ipybind,aldanor/ipybind

prompt:
# -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)
Add ipybind.ext package to the manifest

response:
# -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind', 'ipybind.ext'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)

prompt_tagged:
<commit_before># -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)
<commit_msg>Add ipybind.ext package to the manifest<commit_after>

response_tagged:
# -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind', 'ipybind.ext'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)

text:
# -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)
Add ipybind.ext package to the manifest# -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind', 'ipybind.ext'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)

text_tagged:
<commit_before># -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)
<commit_msg>Add ipybind.ext package to the manifest<commit_after># -*- coding: utf-8 -*-
import re
import ast
from setuptools import setup
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('ipybind/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
setup(
name='ipybind',
author='Ivan Smirnov',
author_email='i.s.smirnov@gmail.com',
license='MIT',
version=version,
url='http://github.com/aldanor/ipybind',
packages=['ipybind', 'ipybind.ext'],
description='IPython and Jupyter integration for pybind11.',
install_requires=['ipython', 'pybind11'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3'
]
)
---

commit: d0fc566f2145d100cbfae864eade01c8163fdb97

old_file:
setup.py

new_file:
setup.py

old_contents:
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.1.2',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)

new_contents:
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.2.0',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)

subject:
Package is now successfully getting integrated in a django project. So version increased

message:
Package is now successfully getting integrated in a django project. So version increased

lang:
Python

license:
agpl-3.0

repos:
tests-assistant/tests-assistant,tests-assistant/tests-assistant

prompt:
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.1.2',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)
Package is now successfully getting integrated in a django project. So version increased

response:
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.2.0',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)

prompt_tagged:
<commit_before># -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.1.2',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)
<commit_msg>Package is now successfully getting integrated in a django project. So version increased<commit_after>

response_tagged:
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.2.0',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)

text:
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.1.2',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)
Package is now successfully getting integrated in a django project. So version increased# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.2.0',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)

text_tagged:
<commit_before># -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.1.2',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)
<commit_msg>Package is now successfully getting integrated in a django project. So version increased<commit_after># -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-tests-assistant',
version='0.2.0',
description= 'A tool to help keep track of tests, specially for you - developer.',
license='(AGPL v3+) GNU AFFERO GENERAL PUBLIC LICENSE Version 3 or later',
url='https://github.com/tests-assistant/tests-assistant/',
author=u'Amirouche Boubekki',
author_email='amirouche.boubekki@gmail.com',
maintainer=u'Arun Karunagath',
maintainer_email='the1.arun@gmail.com',
packages=find_packages(),
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').readlines(),
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
zip_safe=False,
)
|
741776ce931eeb14156ca89f61ad94211d727eff
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
|
from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
include_package_data=True,
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
|
Include templates, etc, with install
|
Include templates, etc, with install
|
Python
|
apache-2.0
|
ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next,ManchesterIO/mollyproject-next
|
from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
Include templates, etc, with install
|
from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
include_package_data=True,
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
|
<commit_before>from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
<commit_msg>Include templates, etc, with install<commit_after>
|
from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
include_package_data=True,
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
|
from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
Include templates, etc, with install
from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
include_package_data=True,
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
|
<commit_before>from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
<commit_msg>Include templates, etc, with install<commit_after>from setuptools import setup, find_packages
from sys import version_info
assert version_info >= (2,7)
setup(
name='molly',
version='2.0dev',
packages=find_packages(exclude=['tests']),
include_package_data=True,
url='http://mollyproject.org/',
author='The Molly Project',
setup_requires=['setuptools'],
tests_require=['unittest2', 'mock'],
test_suite='unittest2.collector',
install_requires=[
'celery',
'cssmin',
'Flask',
'Flask-Assets',
'Flask-Babel',
'Flask-Cache',
'Flask-PyMongo',
'Flask-Script',
'Flask-StatsD',
'geojson',
'gunicorn',
'imposm.parser',
'phonenumbers==5.7b2',
'python-dateutil',
'python-memcached',
'raven',
'requests',
'Shapely',
'supervisor'
],
entry_points={
'console_scripts': [
'mollyui = molly.command:ui_main',
'mollyrest = molly.command:rest_main',
'mollyd = molly.command:mollyd',
'mollydebugd = molly.command:mollydebugd',
'mollyctl = molly.command:mollyctl',
'mollydebugctl = molly.command:mollydebugctl'
]
}
)
|
200f1727f16bcd903554346611afc976846f5896
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
|
Package classifiers: Explicitly target Python 2.7
|
Package classifiers: Explicitly target Python 2.7
|
Python
|
mit
|
reinbach/django-payfast
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
Package classifiers: Explicitly target Python 2.7
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
<commit_msg>Package classifiers: Explicitly target Python 2.7<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
Package classifiers: Explicitly target Python 2.7
#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
<commit_msg>Package classifiers: Explicitly target Python 2.7<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(
name='django-payfast',
version='0.2.2',
author='Mikhail Korobov',
author_email='kmike84@gmail.com',
packages=['payfast', 'payfast.south_migrations'],
url='http://bitbucket.org/kmike/django-payfast/',
download_url = 'http://bitbucket.org/kmike/django-payfast/get/tip.gz',
license = 'MIT license',
description = 'A pluggable Django application for integrating payfast.co.za payment system.',
long_description = open('README.rst').read().decode('utf8'),
classifiers=(
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
|
4e0e6b0d2586ea970da9980ab143488b82316da1
|
setup.py
|
setup.py
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.2',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.3',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
Upgrade dependency appdirs to ==1.4.3
|
Upgrade dependency appdirs to ==1.4.3
|
Python
|
mit
|
renanivo/with
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.2',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
Upgrade dependency appdirs to ==1.4.3
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.3',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
<commit_before>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.2',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
<commit_msg>Upgrade dependency appdirs to ==1.4.3<commit_after>
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.3',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.2',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
Upgrade dependency appdirs to ==1.4.3
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.3',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
<commit_before>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.2',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
<commit_msg>Upgrade dependency appdirs to ==1.4.3<commit_after>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.3',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.1',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
5b7fd9974fab77c396288d0aefbc8b657e5b24d9
|
setup.py
|
setup.py
|
from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.1',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
|
from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.2',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
install_requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
|
Upgrade to v0.1.2 and properly add dictobj as an installation requirement.
|
Upgrade to v0.1.2 and properly add dictobj as an installation requirement.
|
Python
|
apache-2.0
|
grimwm/py-jstree,grimwm/py-jstree
|
from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.1',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
Upgrade to v0.1.2 and properly add dictobj as an installation requirement.
|
from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.2',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
install_requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
|
<commit_before>from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.1',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
<commit_msg>Upgrade to v0.1.2 and properly add dictobj as an installation requirement.<commit_after>
|
from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.2',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
install_requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
|
from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.1',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
Upgrade to v0.1.2 and properly add dictobj as an installation requirement.
from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.2',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
install_requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
|
<commit_before>from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.1',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
<commit_msg>Upgrade to v0.1.2 and properly add dictobj as an installation requirement.<commit_after>from setuptools import setup
import os
def read(filename):
fin = None
data = None
try:
fin = open(filename)
data = fin.read()
finally:
if fin is not None:
fin.close()
return data
setup(
name='jstree',
version='0.1.2',
author='William Grim',
author_email='william@grimapps.com',
url='https://github.com/grimwm/py-jstree',
classifiers = [
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
description='A package that helps generate JSON data for jQuery jsTree.',
long_description=read('README.txt') if os.path.exists('README.txt') else '',
install_requires=['dictobj'],
py_modules=['jstree'],
test_suite='jstree_test',
)
|
379eafd08e7e412c9abf88783f665e36e019fa23
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main',
'pabotcoordinator=pabot.coordinatorwrapper:main',
'pabotworker=pabot.workerwrapper:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
|
Revert "hide worker and coordinator"
|
Revert "hide worker and coordinator"
This reverts commit f73ce955ef6a2562f75f3311de279d43566dba08.
|
Python
|
apache-2.0
|
mkorpela/pabot,mkorpela/pabot
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
Revert "hide worker and coordinator"
This reverts commit f73ce955ef6a2562f75f3311de279d43566dba08.
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main',
'pabotcoordinator=pabot.coordinatorwrapper:main',
'pabotworker=pabot.workerwrapper:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
<commit_msg>Revert "hide worker and coordinator"
This reverts commit f73ce955ef6a2562f75f3311de279d43566dba08.<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main',
'pabotcoordinator=pabot.coordinatorwrapper:main',
'pabotworker=pabot.workerwrapper:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
Revert "hide worker and coordinator"
This reverts commit f73ce955ef6a2562f75f3311de279d43566dba08.
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main',
'pabotcoordinator=pabot.coordinatorwrapper:main',
'pabotworker=pabot.workerwrapper:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
<commit_msg>Revert "hide worker and coordinator"
This reverts commit f73ce955ef6a2562f75f3311de279d43566dba08.<commit_after>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
name = 'Mikko Korpela'
# I might be just a little bit too much afraid of those bots..
address = name.lower().replace(' ', '.')+chr(64)+'gmail.com'
setup(name='robotframework-pabot',
version='1.2.1',
description='Parallel test runner for Robot Framework',
long_description='A parallel executor for Robot Framework tests.'
' With Pabot you can split one execution into multiple and save test execution time.',
author=name,
author_email=address,
url='https://pabot.org',
download_url='https://pypi.python.org/pypi/robotframework-pabot',
packages=find_packages(),
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'License :: OSI Approved :: Apache Software License',
'Development Status :: 5 - Production/Stable',
'Framework :: Robot Framework'
],
entry_points = {'console_scripts': [
'pabot=pabot.pabot:main',
'pabotcoordinator=pabot.coordinatorwrapper:main',
'pabotworker=pabot.workerwrapper:main']},
license='Apache License, Version 2.0',
install_requires=[
'robotframework',
'websockets>=8.1;python_version>="3.6"',
'robotremoteserver>=1.1',
'typing;python_version<"3.5"'])
|
2b241e537eca2ea0aab718a61cbdec4539d03fe4
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates.py',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
|
from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
|
Fix the github repo name.
|
Fix the github repo name.
|
Python
|
mit
|
bzamecnik/journal_dates
|
from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates.py',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
Fix the github repo name.
|
from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
|
<commit_before>from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates.py',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
<commit_msg>Fix the github repo name.<commit_after>
|
from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
|
from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates.py',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
Fix the github repo name.
from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
|
<commit_before>from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates.py',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
<commit_msg>Fix the github repo name.<commit_after>from setuptools import setup
setup(name='journal_dates',
packages=[''],
version='0.1',
description='Prints a monthly journal template',
url='http://github.com/bzamecnik/journal_dates',
author='Bohumir Zamecnik',
author_email='bohumir.zamecnik@gmail.com',
license='MIT',
install_requires=['arrow'],
zip_safe=False,
entry_points={
'console_scripts': [
'journal_dates=journal_dates:main',
],
},
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
])
|
456aba54c0a0967cfca807fe193a959228a0576f
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)
|
#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
install_requires = []
try:
import urllib2
except ImportError:
install_requires.append('urllib2')
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)
|
Make sure urllib2 is installed
|
Make sure urllib2 is installed
|
Python
|
mit
|
panzarino/mlbgame,zachpanz88/mlbgame
|
#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)
Make sure urllib2 is installed
|
#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
install_requires = []
try:
import urllib2
except ImportError:
install_requires.append('urllib2')
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)
|
<commit_before>#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)<commit_msg>Make sure urllib2 is installed<commit_after>
|
#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
install_requires = []
try:
import urllib2
except ImportError:
install_requires.append('urllib2')
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)
|
#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)
Make sure urllib2 is installed
#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
install_requires = []
try:
import urllib2
except ImportError:
install_requires.append('urllib2')
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)
|
<commit_before>#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)<commit_msg>Make sure urllib2 is installed<commit_after>#!/usr/bin/env python2
from distutils.core import setup
import codecs
import os.path as path
cwd = path.dirname(__file__)
version = '0.0.0'
with codecs.open(path.join(cwd, 'mlbgame/version.py'), 'r', 'ascii') as f:
exec(f.read())
version = __version__
assert version != '0.0.0'
install_requires = []
try:
import urllib2
except ImportError:
install_requires.append('urllib2')
setup(
name='mlbgame',
author='Zach Panzarino',
author_email='zachary@panzarino.com',
version=version,
license='MIT',
description='An API to retrieve and read MLB GameDay JSON and XML data',
long_description='An API to retrieve and read MLB GameDay JSON and XML data',
url='https://github.com/zachpanz88/mlbgame',
classifiers=[
'License :: MIT License',
'Development Status :: Initial Development',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Other Audience',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
platforms='ANY',
packages=['mlbgame'],
package_data={'mlbgame': ['gameday-data/*.xml.gz']},
scripts=['scripts/mlbgame-update-games'],
)
|
f291359cec29752ad852002c532b683d278025f2
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
Convert README.md to rst in long_description
|
Convert README.md to rst in long_description
|
Python
|
mit
|
mjumbewu/django-nopassword,relekang/django-nopassword,mjumbewu/django-nopassword,relekang/django-nopassword
|
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
Convert README.md to rst in long_description
|
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
<commit_before>import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
<commit_msg>Convert README.md to rst in long_description<commit_after>
|
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
Convert README.md to rst in long_description
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
<commit_before>import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
<commit_msg>Convert README.md to rst in long_description<commit_after>import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-nopassword",
version='1.3.1',
url='http://github.com/relekang/django-nopassword',
author='Rolf Erik Lekang',
author_email='me@rolflekang.com',
description='Authentication backend for django that uses a one time code instead of passwords',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
'twilio==3.6.8',
'mock>=1.0'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
a9709c70148368ead466b976136f89efeeaed8bc
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='bmi-ilamb',
version=execfile('./bmi_ilamb/version.py'),
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
|
from setuptools import setup, find_packages
execfile('./bmi_ilamb/version.py')
setup(name='bmi-ilamb',
version=__version__,
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
|
Fix error with setting version
|
Fix error with setting version
|
Python
|
mit
|
permamodel/bmi-ilamb
|
from setuptools import setup, find_packages
setup(name='bmi-ilamb',
version=execfile('./bmi_ilamb/version.py'),
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
Fix error with setting version
|
from setuptools import setup, find_packages
execfile('./bmi_ilamb/version.py')
setup(name='bmi-ilamb',
version=__version__,
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(name='bmi-ilamb',
version=execfile('./bmi_ilamb/version.py'),
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Fix error with setting version<commit_after>
|
from setuptools import setup, find_packages
execfile('./bmi_ilamb/version.py')
setup(name='bmi-ilamb',
version=__version__,
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
|
from setuptools import setup, find_packages
setup(name='bmi-ilamb',
version=execfile('./bmi_ilamb/version.py'),
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
Fix error with setting version
from setuptools import setup, find_packages
execfile('./bmi_ilamb/version.py')
setup(name='bmi-ilamb',
version=__version__,
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(name='bmi-ilamb',
version=execfile('./bmi_ilamb/version.py'),
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Fix error with setting version<commit_after>from setuptools import setup, find_packages
execfile('./bmi_ilamb/version.py')
setup(name='bmi-ilamb',
version=__version__,
description='BMI for ILAMB',
long_description=open('README.md').read(),
url='https://github.com/permamodel/bmi-ilamb',
license='MIT',
author='Mark Piper',
author_email='mark.piper@colorado.edu',
packages=find_packages(exclude=['*.tests']),
package_data={'': ['data/*']},
install_requires=['pyyaml', 'basic-modeling-interface'],
keywords='CSDMS BMI ILAMB model benchmark',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
)
|
3b0479903c6ee13a5a6ab3d2fc96ae8c8566ed08
|
setup.py
|
setup.py
|
from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
1/0.
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
|
from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
|
Remove a debug statement (1/0.)
|
Remove a debug statement (1/0.)
|
Python
|
mit
|
kpolimis/sklearn-forest-ci,uwescience/sklearn-forest-ci,arokem/sklearn-forest-ci,kpolimis/sklearn-forest-ci,scikit-learn-contrib/forest-confidence-interval,uwescience/sklearn-forest-ci,scikit-learn-contrib/forest-confidence-interval,arokem/sklearn-forest-ci
|
from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
1/0.
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
Remove a debug statement (1/0.)
|
from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
|
<commit_before>from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
1/0.
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
<commit_msg>Remove a debug statement (1/0.)<commit_after>
|
from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
|
from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
1/0.
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
Remove a debug statement (1/0.)
from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
|
<commit_before>from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
1/0.
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
<commit_msg>Remove a debug statement (1/0.)<commit_after>from __future__ import print_function
import sys, os
from setuptools import setup, find_packages
with open('requirements.txt') as f:
INSTALL_REQUIRES = [l.strip() for l in f.readlines() if l]
try:
import numpy
except ImportError:
print('numpy is required during installation')
sys.exit(1)
try:
import scipy
except ImportError:
print('scipy is required during installation')
sys.exit(1)
# Get version and release info, which is all stored in sklforestci/version.py
ver_file = os.path.join('sklforestci', 'version.py')
with open(ver_file) as f:
exec(f.read())
opts = dict(name=NAME,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
url=URL,
download_url=DOWNLOAD_URL,
license=LICENSE,
classifiers=CLASSIFIERS,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
platforms=PLATFORMS,
version=VERSION,
packages=find_packages(),
install_requires=INSTALL_REQUIRES)
if __name__ == '__main__':
setup(**opts)
|
ae463e9f27bd1266125d0d3d94dd88171df997d2
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.0',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.1.1',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
|
Mark version 1.1.1 with license.
|
Mark version 1.1.1 with license.
|
Python
|
mit
|
ironfroggy/straight.plugin,pombredanne/straight.plugin
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.0',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
Mark version 1.1.1 with license.
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.1.1',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.0',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
<commit_msg>Mark version 1.1.1 with license.<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.1.1',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.0',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
Mark version 1.1.1 with license.
#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.1.1',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.0',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
<commit_msg>Mark version 1.1.1 with license.<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.1.1',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='ironfroggy@gmail.com',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
|
6c70998c81dfe945ab82fdf0f2b99cb73ee1b031
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
)
|
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
entry_points={
'console_scripts': [
'myria_upload = myria.cmd.upload_file:main'
],
},
)
|
Add upload file console script
|
Add upload file console script
|
Python
|
bsd-3-clause
|
uwescience/myria-python,uwescience/myria-python
|
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
)
Add upload file console script
|
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
entry_points={
'console_scripts': [
'myria_upload = myria.cmd.upload_file:main'
],
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
)
<commit_msg>Add upload file console script<commit_after>
|
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
entry_points={
'console_scripts': [
'myria_upload = myria.cmd.upload_file:main'
],
},
)
|
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
)
Add upload file console script
from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
entry_points={
'console_scripts': [
'myria_upload = myria.cmd.upload_file:main'
],
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
)
<commit_msg>Add upload file console script<commit_after>from setuptools import setup, find_packages
setup(
name='myria-python',
version='1.1',
author='Daniel Halperin',
author_email='dhalperi@cs.washington.edu',
packages=find_packages(),
scripts=[],
url='https://github.com/uwescience/myria',
description='Python interface for Myria.',
long_description=open('README.txt').read(),
install_requires=["requests", "requests_toolbelt", "messytables"],
entry_points={
'console_scripts': [
'myria_upload = myria.cmd.upload_file:main'
],
},
)
|
9343fe5c269240d488a9a31016ab396f1df55c8e
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
|
from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8.5', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
|
Use rpy2 version available through conda (2.8.5)
|
Use rpy2 version available through conda (2.8.5)
|
Python
|
mit
|
shafferm/dada2_qiime1,shafferm/dada2_qiime1
|
from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
Use rpy2 version available through conda (2.8.5)
|
from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8.5', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
|
<commit_before>from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
<commit_msg>Use rpy2 version available through conda (2.8.5)<commit_after>
|
from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8.5', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
|
from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
Use rpy2 version available through conda (2.8.5)
from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8.5', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
|
<commit_before>from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
<commit_msg>Use rpy2 version available through conda (2.8.5)<commit_after>from setuptools import setup, find_packages
from glob import glob
__author__ = 'shafferm'
__version__ = '0.1.1'
setup(
name="dada2_qiime1",
version=__version__,
install_requires=['rpy2 ==2.8.5', 'biom-format', 'numpy', 'qiime'],
scripts=glob("scripts/*.py"),
packages=find_packages(),
description="Using DADA2 with qiime 1",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
package_data={'': ['*.r', '*.R']},
include_package_data=True,
url="https://github.com/shafferm/dada2_qiime1/",
download_url="https://github.com/shafferm/dada2_qiime1/tarball/%s" % __version__
)
|
e1319c52e55834a85bdd3242933f64f239e8b1d0
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.2.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 3.7.3',
'requests == 2.13.0',
'six == 1.11.0'
],
entry_points={
}
)
|
from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.3.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 4.4.0',
'requests == 2.22.0',
'six == 1.12.0'
],
entry_points={
}
)
|
Update dependencies and version number, NB from the OECD rates for all dates are now included, not only those that fall before FERD rates
|
Update dependencies and version number, NB from the OECD rates for all dates are now included, not only those that fall before FERD rates
|
Python
|
mit
|
markbrough/exchangerates
|
from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.2.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 3.7.3',
'requests == 2.13.0',
'six == 1.11.0'
],
entry_points={
}
)
Update dependencies and version number, NB from the OECD rates for all dates are now included, not only those that fall before FERD rates
|
from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.3.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 4.4.0',
'requests == 2.22.0',
'six == 1.12.0'
],
entry_points={
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.2.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 3.7.3',
'requests == 2.13.0',
'six == 1.11.0'
],
entry_points={
}
)
<commit_msg>Update dependencies and version number, NB from the OECD rates for all dates are now included, not only those that fall before FERD rates<commit_after>
|
from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.3.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 4.4.0',
'requests == 2.22.0',
'six == 1.12.0'
],
entry_points={
}
)
|
from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.2.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 3.7.3',
'requests == 2.13.0',
'six == 1.11.0'
],
entry_points={
}
)
Update dependencies and version number, NB from the OECD rates for all dates are now included, not only those that fall before FERD ratesfrom setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.3.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 4.4.0',
'requests == 2.22.0',
'six == 1.12.0'
],
entry_points={
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.2.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 3.7.3',
'requests == 2.13.0',
'six == 1.11.0'
],
entry_points={
}
)
<commit_msg>Update dependencies and version number, NB from the OECD rates for all dates are now included, not only those that fall before FERD rates<commit_after>from setuptools import setup, find_packages
setup(
name='exchangerates',
version='0.3.0',
description="A module to make it easier to handle historical exchange rates",
long_description="",
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 3.6'
],
author='Mark Brough',
author_email='mark@brough.io',
url='http://github.com/markbrough/exchangerates',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples']),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
install_requires=[
'lxml == 4.4.0',
'requests == 2.22.0',
'six == 1.12.0'
],
entry_points={
}
)
|
97284a715fe2e062b6923e9fbb83a30972fe0a75
|
setup.py
|
setup.py
|
from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
|
from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
|
Update license details according to pypi standards
|
Update license details according to pypi standards
|
Python
|
apache-2.0
|
harshavardhana/boilerpipy
|
from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
Update license details according to pypi standards
|
from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
|
<commit_before>from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
<commit_msg>Update license details according to pypi standards<commit_after>
|
from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
|
from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
Update license details according to pypi standardsfrom setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
|
<commit_before>from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
<commit_msg>Update license details according to pypi standards<commit_after>from setuptools import setup
version = '0.2beta'
setup(
name='boilerpipy',
version=version,
description='Readability/Boilerpipe extractor in Python',
author='Harshavardhana',
author_email='harsha@harshavardhana.net',
url='https://github.com/harshavardhana/boilerpipy.git',
license='Apache',
platforms=['any'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
packages=['boilerpipy'],
scripts=['bin/readability'],
install_requires=['lxml', 'beautifulsoup4', 'urlparse2'],
)
|
77f781d54a76eee5b1b47b57872593542e0a93bf
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
|
from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
|
Add English natural language classifier
|
Add English natural language classifier
|
Python
|
mit
|
despawnerer/summarize
|
from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
Add English natural language classifier
|
from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
<commit_msg>Add English natural language classifier<commit_after>
|
from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
|
from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
Add English natural language classifierfrom setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
<commit_msg>Add English natural language classifier<commit_after>from setuptools import setup, find_packages
setup(
name='pysummarize',
version='0.5.0',
description='Simple Python and NLTK-based implementation of text summarization',
url='https://github.com/despawnerer/summarize',
author='Aleksei Voronov',
author_email='despawn@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Text Processing',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language nltk linguistics nlp',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=[
'Distance>=0.1.3',
'networkx>=1.9.1',
'nltk>=3.0.3',
'funcy>=1.5',
],
)
|
a0aa52418658623044e22eaba7edb441b15e642c
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django'
)
}
)
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django',
'wsgi_intercept'
)
}
)
|
Add wsgi_intercept to the dependencies list
|
Add wsgi_intercept to the dependencies list
|
Python
|
mit
|
armet/python-armet
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django'
)
}
)
Add wsgi_intercept to the dependencies list
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django',
'wsgi_intercept'
)
}
)
|
<commit_before>#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django'
)
}
)
<commit_msg>Add wsgi_intercept to the dependencies list<commit_after>
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django',
'wsgi_intercept'
)
}
)
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django'
)
}
)
Add wsgi_intercept to the dependencies list#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django',
'wsgi_intercept'
)
}
)
|
<commit_before>#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django'
)
}
)
<commit_msg>Add wsgi_intercept to the dependencies list<commit_after>#! /usr/bin/env python
from setuptools import setup, find_packages
setup(
name='armet',
version='0.3.0-pre',
description='Clean and modern framework for creating RESTful APIs.',
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/python-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
install_requires=(
'six', # Python 2 and 3 normalization layer
'python-mimeparse' # For parsing accept and content-type headers
),
extras_require={
'test': (
'nose',
'yanc',
'httplib2',
'flask',
'django',
'wsgi_intercept'
)
}
)
|
70ce88c93054702713f4a636aa354ae806e757e5
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.1',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.2',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Update filter. Review by @nicolaasdiebaas
|
Update filter. Review by @nicolaasdiebaas
|
Python
|
bsd-3-clause
|
praekelt/seed-staged-based-messaging,praekelt/seed-stage-based-messaging,praekelt/seed-stage-based-messaging
|
from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.1',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Update filter. Review by @nicolaasdiebaas
|
from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.2',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.1',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Update filter. Review by @nicolaasdiebaas<commit_after>
|
from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.2',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.1',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Update filter. Review by @nicolaasdiebaasfrom setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.2',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.1',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Update filter. Review by @nicolaasdiebaas<commit_after>from setuptools import setup, find_packages
setup(
name="seed-stage-based-messaging",
version="0.1",
url='http://github.com/praekelt/seed-stage-based-messaging',
license='BSD',
author='Praekelt Foundation',
author_email='dev@praekeltfoundation.org',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.2',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'dj-static==0.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'requests==2.9.1',
'go-http==0.3.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
7b2d44cdc23ab8ec16113ff2aa8e0c7f5d98505c
|
setup.py
|
setup.py
|
"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
print('Failed to convert README to rst.')
try:
long_description = open('README.md').read()
except:
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.5',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
|
"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
try:
long_description = open('README.md').read()
except:
try:
long_description = open('README.rst').read()
except:
print('Failed to convert README to rst.')
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.7',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
|
Bump version and make better attemp at generating README
|
Bump version and make better attemp at generating README
|
Python
|
mit
|
boppreh/keyboard,glitchassassin/keyboard
|
"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
print('Failed to convert README to rst.')
try:
long_description = open('README.md').read()
except:
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.5',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
Bump version and make better attemp at generating README
|
"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
try:
long_description = open('README.md').read()
except:
try:
long_description = open('README.rst').read()
except:
print('Failed to convert README to rst.')
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.7',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
|
<commit_before>"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
print('Failed to convert README to rst.')
try:
long_description = open('README.md').read()
except:
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.5',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
<commit_msg>Bump version and make better attemp at generating README<commit_after>
|
"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
try:
long_description = open('README.md').read()
except:
try:
long_description = open('README.rst').read()
except:
print('Failed to convert README to rst.')
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.7',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
|
"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
print('Failed to convert README to rst.')
try:
long_description = open('README.md').read()
except:
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.5',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
Bump version and make better attemp at generating README"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
try:
long_description = open('README.md').read()
except:
try:
long_description = open('README.rst').read()
except:
print('Failed to convert README to rst.')
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.7',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
|
<commit_before>"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
print('Failed to convert README to rst.')
try:
long_description = open('README.md').read()
except:
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.5',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
<commit_msg>Bump version and make better attemp at generating README<commit_after>"""
Usage instructions:
- If you are installing: `python setup.py install`
- If you are developing: `python setup.py sdist bdist --format=zip bdist_wheel --universal`
"""
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except ImportError:
try:
long_description = open('README.md').read()
except:
try:
long_description = open('README.rst').read()
except:
print('Failed to convert README to rst.')
long_description = ''
from setuptools import setup
setup(
name='keyboard',
version='0.6.7',
author='BoppreH',
author_email='boppreh@gmail.com',
packages=['keyboard'],
url='https://github.com/boppreh/keyboard',
license='MIT',
description='Hook and simulate keyboard events on Windows and Linux',
keywords = 'keyboard hook simulate hotkey',
long_description=long_description,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
|
886eb01e87fa8decf821e6b560a6b57f4376644e
|
setup.py
|
setup.py
|
# coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.1.4',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
|
# coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.2.0',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
|
Increase the version to 0.2
|
Increase the version to 0.2
An backward incompatible change was introduced by removing the package
names metadata from the inventory core.
If necessary, it is hereby advised to provide the metadata in other ways
supported by ansible, such as variables defined on role or playbook
level.
|
Python
|
mit
|
apophys/ipaqe-dyndir
|
# coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.1.4',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
Increase the version to 0.2
An backward incompatible change was introduced by removing the package
names metadata from the inventory core.
If necessary, it is hereby advised to provide the metadata in other ways
supported by ansible, such as variables defined on role or playbook
level.
|
# coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.2.0',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
|
<commit_before># coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.1.4',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
<commit_msg>Increase the version to 0.2
An backward incompatible change was introduced by removing the package
names metadata from the inventory core.
If necessary, it is hereby advised to provide the metadata in other ways
supported by ansible, such as variables defined on role or playbook
level.<commit_after>
|
# coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.2.0',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
|
# coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.1.4',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
Increase the version to 0.2
A backward incompatible change was introduced by removing the package
names metadata from the inventory core.
If necessary, it is hereby advised to provide the metadata in other ways
supported by ansible, such as variables defined on role or playbook
level.# coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.2.0',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
|
<commit_before># coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.1.4',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
<commit_msg>Increase the version to 0.2
A backward incompatible change was introduced by removing the package
names metadata from the inventory core.
If necessary, it is hereby advised to provide the metadata in other ways
supported by ansible, such as variables defined on role or playbook
level.<commit_after># coding: utf-8
# Author: Milan Kubik
from setuptools import setup, find_packages
with open('README.md') as f:
long_description = f.read()
setup(
name='ipaqe-dyndir',
version='0.2.0',
description='Ansible dynamic inventory for FreeIPA',
long_description=long_description,
keywords='freeipa tests ansible',
license='MIT',
author='Milan Kubik',
author_email='mkubik@redhat.com',
url='https://github.com/apophys/ipaqe-dyndir',
classifiers=[
'Development Status :: 3 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['tests']),
install_requires=['PyYAML'],
entry_points={
'console_scripts': [
'ipaqe-dyndir = ipaqe_dyndir.__main__:main'
],
'org.freeipa.dyndir.plugins': [
'updates-testing = ipaqe_dyndir.builtin.repos:UpdatesTestingRepositoryPlugin',
'copr = ipaqe_dyndir.builtin.repos:COPRPlugin'
]
}
)
|
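Note: the record above registers plugins under the custom entry-point group 'org.freeipa.dyndir.plugins'. The snippet below is only a hedged sketch of how such a group could be enumerated at runtime with setuptools' pkg_resources; the group name is taken from the setup() call, but the loader function itself is an illustrative assumption and not part of this commit.
# Illustrative sketch (assumption): discover plugins registered under the
# entry-point group declared in the setup.py above. Requires setuptools.
import pkg_resources
def load_dyndir_plugins(group='org.freeipa.dyndir.plugins'):
    plugins = {}
    for entry_point in pkg_resources.iter_entry_points(group):
        # load() imports the target module and returns the referenced object
        plugins[entry_point.name] = entry_point.load()
    return plugins
if __name__ == '__main__':
    for name, plugin_cls in load_dyndir_plugins().items():
        print(name, plugin_cls)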
62cddf84c9e46bb34c5f8320c0e739e38ebf5fec
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.codegen', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
|
Include codegen package in distribution.
|
Include codegen package in distribution.
|
Python
|
bsd-2-clause
|
vmuriart/grako,frnknglrt/grako
|
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
Include codegen package in distribution.
|
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.codegen', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
<commit_msg>Include codegen package in distribution.<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.codegen', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
|
# -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
Include codegen package in distribution.# -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.codegen', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
<commit_msg>Include codegen package in distribution.<commit_after># -*- coding: utf-8 -*-
from setuptools import setup
try:
from Cython.Build import cythonize
except ImportError:
CYTHON = False
else:
CYTHON = True
setup(
name='grako',
version='3.1.3-rc.1',
author='Juancarlo Añez',
author_email='apalala@gmail.com',
packages=['grako', 'grako.codegen', 'grako.test'],
scripts=['scripts/grako'],
url='http://bitbucket.org/apalala/grako',
license='BSD License',
description='A generator of PEG/Packrat parsers from EBNF grammars.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Environment :: Console',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Code Generators',
'Topic :: Software Development :: Compilers',
'Topic :: Software Development :: Interpreters',
'Topic :: Text Processing :: General'
],
ext_modules=cythonize(
"grako/**/*.py",
exclude=[
'grako/__main__.py',
'grako/test/__main__.py',
'grako/test/*.py'
]
) if CYTHON else [],
)
|
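Note: the fix above adds 'grako.codegen' to a hand-maintained packages list. As a hedged aside, setuptools can also discover subpackages automatically, which avoids this class of omission; the sketch below illustrates that alternative pattern under that assumption and is not what the grako commit itself did.
# Hypothetical alternative: discover subpackages (including grako.codegen)
# automatically instead of listing them by hand.
from setuptools import setup, find_packages
setup(
    name='grako',
    version='3.1.3-rc.1',
    packages=find_packages(exclude=['grako.test', 'grako.test.*']),
)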
74152918057a90799163ee31725916cd411b31bc
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='0.1.0',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Determine connections between biological processes."),
long_description=open('README.rst').read(),
)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='1.0a1',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Identify significant connections between "
"biological processes using gene interaction networks."),
long_description=open('README.rst').read(),
)
|
Make this the alpha release of 1.0.
|
Make this the alpha release of 1.0.
|
Python
|
mit
|
gotgenes/BiologicalProcessNetworks
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='0.1.0',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Determine connections between biological processes."),
long_description=open('README.rst').read(),
)
Make this the alpha release of 1.0.
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='1.0a1',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Identify significant connections between "
"biological processes using gene interaction networks."),
long_description=open('README.rst').read(),
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='0.1.0',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Determine connections between biological processes."),
long_description=open('README.rst').read(),
)
<commit_msg>Make this the alpha release of 1.0.<commit_after>
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='1.0a1',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Identify significant connections between "
"biological processes using gene interaction networks."),
long_description=open('README.rst').read(),
)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='0.1.0',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Determine connections between biological processes."),
long_description=open('README.rst').read(),
)
Make this the alpha release of 1.0.#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='1.0a1',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Identify significant connections between "
"biological processes using gene interaction networks."),
long_description=open('README.rst').read(),
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='0.1.0',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Determine connections between biological processes."),
long_description=open('README.rst').read(),
)
<commit_msg>Make this the alpha release of 1.0.<commit_after>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from setuptools import setup
import os.path
scripts = ['bpln', 'cbpn', 'mcmcbpn', 'sabpn']
scripts = [os.path.sep.join(('scripts', script)) for script in scripts]
setup(
name='BiologicalProcessNetworks',
version='1.0a1',
author='Christopher D. Lasher',
author_email='chris.lasher@gmail.com',
install_requires=[
'bitarray',
'ConflictsOptionParser',
'ConvUtils',
'fisher',
'networkx>=1.0',
'numpy',
'scipy'
],
packages=['bpn', 'bpn.mcmc', 'bpn.tests'],
scripts=scripts,
url='http://pypi.python.org/pypi/BiologicalProcessNetworks',
license='MIT License',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Bio-Informatics'
],
description=("Identify significant connections between "
"biological processes using gene interaction networks."),
long_description=open('README.rst').read(),
)
|
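Note: the version string '1.0a1' above is a PEP 440 pre-release tag. The short sketch below, which assumes the third-party packaging library is installed, shows how such a tag is parsed and ordered relative to the final release; it is illustrative only.
# Sketch (assumes the 'packaging' library): '1.0a1' is a pre-release
# that sorts after 0.1.0 but before the final 1.0.
from packaging.version import Version
assert Version('0.1.0') < Version('1.0a1') < Version('1.0')
print(Version('1.0a1').is_prerelease)  # True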
6dcfacb5c76305bb227674eac6d926e54a26f45c
|
utils.py
|
utils.py
|
import platform
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'
|
import platform
import io
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'
def Pixmap2StringIO(pixmap):
byteArray = QByteArray()
buffer = QBuffer(byteArray)
buffer.open(QIODevice.WriteOnly)
pixmap.save(buffer, "PNG")
strio = io.StringIO(byteArray)
strio.seek(0)
return strio
|
Write Pixmap to a StringIO for uploading.
|
Write Pixmap to a StringIO for uploading.
|
Python
|
mit
|
miniCruzer/postit-desktop
|
import platform
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'Write Pixmap to a StringIO for uploading.
|
import platform
import io
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'
def Pixmap2StringIO(pixmap):
byteArray = QByteArray()
buffer = QBuffer(byteArray)
buffer.open(QIODevice.WriteOnly)
pixmap.save(buffer, "PNG")
strio = io.StringIO(byteArray)
strio.seek(0)
return strio
|
<commit_before>import platform
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'<commit_msg>Write Pixmap to a StringIO for uploading.<commit_after>
|
import platform
import io
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'
def Pixmap2StringIO(pixmap):
byteArray = QByteArray()
buffer = QBuffer(byteArray)
buffer.open(QIODevice.WriteOnly)
pixmap.save(buffer, "PNG")
strio = io.StringIO(byteArray)
strio.seek(0)
return strio
|
import platform
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'Write Pixmap to a StringIO for uploading.import platform
import io
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'
def Pixmap2StringIO(pixmap):
byteArray = QByteArray()
buffer = QBuffer(byteArray)
buffer.open(QIODevice.WriteOnly)
pixmap.save(buffer, "PNG")
strio = io.StringIO(byteArray)
strio.seek(0)
return strio
|
<commit_before>import platform
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'<commit_msg>Write Pixmap to a StringIO for uploading.<commit_after>import platform
import io
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
RUNNING_IN_HELL = platform.system() == 'Windows'
RUNNING_IN_STEVE_JOBS = platform.system() == 'Darwin'
RUNNING_IN_GANOO_LOONIX = platform.system() == 'Linux'
def Pixmap2StringIO(pixmap):
byteArray = QByteArray()
buffer = QBuffer(byteArray)
buffer.open(QIODevice.WriteOnly)
pixmap.save(buffer, "PNG")
strio = io.StringIO(byteArray)
strio.seek(0)
return strio
|
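Note: the helper above stores binary PNG output in io.StringIO, which on Python 3 expects text. A commonly used variant keeps the data in io.BytesIO instead; the sketch below assumes PyQt5 (and a QApplication when the pixmap is created) and is not the repository's code.
# Hypothetical BytesIO variant, since the saved PNG data is binary.
import io
from PyQt5.QtCore import QBuffer, QByteArray, QIODevice
def pixmap_to_bytesio(pixmap):
    byte_array = QByteArray()
    buffer = QBuffer(byte_array)
    buffer.open(QIODevice.WriteOnly)
    pixmap.save(buffer, "PNG")  # serialize the pixmap as PNG into the buffer
    buffer.close()
    bio = io.BytesIO(byte_array.data())  # QByteArray -> bytes
    bio.seek(0)
    return bio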
78d57a82383747b657522c35449d249cb17cd610
|
views.py
|
views.py
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views', 'models'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
|
Expand pre loading on warmup
|
Expand pre loading on warmup
|
Python
|
bsd-3-clause
|
brstrat/djangoappengine,Implisit/djangoappengine,potatolondon/djangoappengine-1-4,dwdraju/djangoappengine,potatolondon/djangoappengine-1-4,django-nonrel/djangoappengine
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
Expand pre loading on warmup
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views', 'models'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
|
<commit_before>from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
<commit_msg>Expand pre loading on warmup<commit_after>
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views', 'models'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
|
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
Expand pre loading on warmupfrom django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views', 'models'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
|
<commit_before>from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
<commit_msg>Expand pre loading on warmup<commit_after>from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
"""
Provides default procedure for handling warmup requests on App Engine.
Just add this view to your main urls.py.
"""
for app in settings.INSTALLED_APPS:
for name in ('urls', 'views', 'models'):
try:
import_module('%s.%s' % (app, name))
except ImportError:
pass
content_type = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
return HttpResponse('Warmup done', content_type=content_type)
|
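Note: the docstring above says to add the warmup view to the project's main urls.py. The wiring below is a hedged example for a Django version contemporary with django.utils.importlib; the import path djangoappengine.views and the URL conf style are assumptions for illustration, and '_ah/warmup' is the path App Engine requests.
# Illustrative urls.py wiring (assumed import path and Django version).
from django.conf.urls import url
from djangoappengine.views import warmup
urlpatterns = [
    url(r'^_ah/warmup$', warmup),  # App Engine warmup endpoint
]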
6fabe58bda70c9f6f05a226585259d44b178d6de
|
tests/from_json_test.py
|
tests/from_json_test.py
|
from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert tmp_df.column_names == df.column_names
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
|
from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert set(tmp_df.column_names) == set(df.column_names)
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
|
Use set to compare elements between two lists (making them immutable)
|
Use set to compare elements between two lists (making them immutable)
|
Python
|
mit
|
maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex
|
from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert tmp_df.column_names == df.column_names
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
Use set to compare elements between two lists (making them immutable)
|
from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert set(tmp_df.column_names) == set(df.column_names)
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
|
<commit_before>from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert tmp_df.column_names == df.column_names
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
<commit_msg>Use set to compare elements between two lists (making them immutable)<commit_after>
|
from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert set(tmp_df.column_names) == set(df.column_names)
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
|
from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert tmp_df.column_names == df.column_names
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
Use set to compare elements between two lists (making them immutable)from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert set(tmp_df.column_names) == set(df.column_names)
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
|
<commit_before>from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert tmp_df.column_names == df.column_names
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
<commit_msg>Use set to compare elements between two lists (making them immutable)<commit_after>from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert set(tmp_df.column_names) == set(df.column_names)
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
|
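Note: the change above replaces list equality with set equality so that column order no longer affects the assertion. Strictly, set() produces a mutable set; the gain is order-insensitive comparison rather than immutability. A minimal standalone illustration (not vaex-specific):
# List equality is order-sensitive; set equality only checks membership.
a = ['x', 'y', 'bool']
b = ['bool', 'x', 'y']
print(a == b)            # False: same elements, different order
print(set(a) == set(b))  # True: order is ignored
# Caveat: converting to set also collapses duplicates, so repeated
# column names would not be detected by this comparison.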
84ebba9c7d9290a7c1b72e82c7ba99a1f3b0db9f
|
eli5/explain.py
|
eli5/explain.py
|
# -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
|
# -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
from eli5.base import Explanation
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator prediction """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator weights """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
|
Fix return values of placeholder functions
|
Fix return values of placeholder functions
This allows getting nicer type completions in the IDE.
|
Python
|
mit
|
TeamHG-Memex/eli5,TeamHG-Memex/eli5,TeamHG-Memex/eli5
|
# -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
Fix return values of placeholder functions
This allows getting nicer type completions in the IDE.
|
# -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
from eli5.base import Explanation
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator prediction """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator weights """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
|
<commit_before># -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
<commit_msg>Fix return values of placeholder functions
This allows getting nicer type completions in the IDE.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
from eli5.base import Explanation
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator prediction """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator weights """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
|
# -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
Fix return values of placeholder functions
This allows getting nicer type completions in the IDE.# -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
from eli5.base import Explanation
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator prediction """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator weights """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
|
<commit_before># -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator """
return {
"estimator": repr(estimator),
"description": "Error: estimator %r is not supported" % estimator,
}
<commit_msg>Fix return values of placeholder functions
This allows getting nicer type completions in the IDE.<commit_after># -*- coding: utf-8 -*-
"""
Dispatch module. Explanation functions for concrete estimator classes
are defined in submodules.
"""
from singledispatch import singledispatch
from eli5.base import Explanation
@singledispatch
def explain_prediction(estimator, doc, **kwargs):
""" Return an explanation of an estimator prediction """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
@singledispatch
def explain_weights(estimator, **kwargs):
""" Return an explanation of an estimator weights """
return Explanation(
estimator=repr(estimator),
error="estimator %r is not supported" % estimator,
)
|
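Note: the two functions above are singledispatch fallbacks; support for concrete estimator classes is added by registering overloads elsewhere. The sketch below shows the registration pattern with a stand-in class, using functools.singledispatch from the standard library rather than the singledispatch backport imported above; it is illustrative, not eli5's actual registration code.
# Minimal sketch of the dispatch pattern with a stand-in estimator type.
from functools import singledispatch
class DummyEstimator(object):
    pass
@singledispatch
def explain_weights(estimator, **kwargs):
    # generic fallback, mirrors the placeholder behaviour above
    return {'estimator': repr(estimator), 'error': 'not supported'}
@explain_weights.register(DummyEstimator)
def _explain_dummy(estimator, **kwargs):
    # overload chosen when the first argument is a DummyEstimator
    return {'estimator': 'DummyEstimator', 'feature_weights': []}
print(explain_weights(object()))          # uses the fallback
print(explain_weights(DummyEstimator()))  # uses the registered overload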
28c326f61848e50bdc6a5e86fc790f8114bd0468
|
rencon.py
|
rencon.py
|
#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
|
#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
if os.path.basename(f) == name:
print 'OK %s' % name
else:
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
|
Fix error when already renamed
|
Fix error when already renamed
|
Python
|
mit
|
laurentb/rencon
|
#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
Fix error when already renamed
|
#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
if os.path.basename(f) == name:
print 'OK %s' % name
else:
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
<commit_msg>Fix error when already renamed<commit_after>
|
#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
if os.path.basename(f) == name:
print 'OK %s' % name
else:
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
|
#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
Fix error when already renamed
#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
if os.path.basename(f) == name:
print 'OK %s' % name
else:
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
<commit_msg>Fix error when already renamed<commit_after>#!/usr/bin/env python
import os
import sys
from string import Template
import argparse
import hashlib
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Rename files based on content.')
parser.add_argument('files', metavar='FILE', type=str, nargs='+',
help='Files to rename')
parser.add_argument('-m', '--mask', nargs='?',
help='File destination mask', default='${hash}.${ext}')
parser.add_argument('-p', '--pretend', action='store_true',
help='Do not rename, just print')
args = parser.parse_args()
print 'Renaming with mask: %s' % args.mask
mask = Template(args.mask)
for f in args.files:
if not os.path.exists(f):
print >>sys.stderr, 'File %s does not exists.' % f
else:
with open(f) as fp:
h = hashlib.sha1(fp.read()).hexdigest()
ext = os.path.splitext(f)[1][1:]
name = mask.substitute(hash=h, ext=ext)
dest = os.path.join(os.path.dirname(f), name)
if os.path.basename(f) == name:
print 'OK %s' % name
else:
print "`%s' -> `%s'" % (f, dest)
if os.path.exists(dest):
print >>sys.stderr, 'Destination %s already exists.' % dest
elif not args.pretend:
os.rename(f, dest)
|
c6fa98931feaf9514b84ae979f32013ca345ef5f
|
saleor/product/views.py
|
saleor/product/views.py
|
from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
return TemplateResponse(request, 'product/details.html', {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
|
from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
template_name = 'product/details_%s' % (product.__class__.__name__.lower())
return TemplateResponse(request, [template_name, 'product/details.html'], {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
|
Allow to use custom templates for products details pages
|
Allow to use custom templates for products details pages
|
Python
|
bsd-3-clause
|
josesanch/saleor,car3oon/saleor,UITools/saleor,maferelo/saleor,avorio/saleor,tfroehlich82/saleor,avorio/saleor,laosunhust/saleor,KenMutemi/saleor,paweltin/saleor,mociepka/saleor,dashmug/saleor,HyperManTT/ECommerceSaleor,paweltin/saleor,HyperManTT/ECommerceSaleor,Drekscott/Motlaesaleor,taedori81/saleor,UITools/saleor,josesanch/saleor,itbabu/saleor,UITools/saleor,KenMutemi/saleor,rchav/vinerack,spartonia/saleor,rchav/vinerack,paweltin/saleor,spartonia/saleor,HyperManTT/ECommerceSaleor,car3oon/saleor,paweltin/saleor,laosunhust/saleor,hongquan/saleor,rodrigozn/CW-Shop,maferelo/saleor,UITools/saleor,dashmug/saleor,spartonia/saleor,taedori81/saleor,taedori81/saleor,mociepka/saleor,laosunhust/saleor,hongquan/saleor,rchav/vinerack,taedori81/saleor,jreigel/saleor,Drekscott/Motlaesaleor,jreigel/saleor,arth-co/saleor,dashmug/saleor,arth-co/saleor,maferelo/saleor,mociepka/saleor,avorio/saleor,car3oon/saleor,UITools/saleor,rodrigozn/CW-Shop,itbabu/saleor,spartonia/saleor,Drekscott/Motlaesaleor,Drekscott/Motlaesaleor,KenMutemi/saleor,hongquan/saleor,laosunhust/saleor,arth-co/saleor,josesanch/saleor,itbabu/saleor,jreigel/saleor,tfroehlich82/saleor,rodrigozn/CW-Shop,tfroehlich82/saleor,arth-co/saleor,avorio/saleor
|
from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
return TemplateResponse(request, 'product/details.html', {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
Allow to use custom templates for products details pages
|
from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
template_name = 'product/details_%s' % (product.__class__.__name__.lower())
return TemplateResponse(request, [template_name, 'product/details.html'], {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
|
<commit_before>from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
return TemplateResponse(request, 'product/details.html', {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
<commit_msg>Allow to use custom templates for products details pages<commit_after>
|
from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
template_name = 'product/details_%s' % (product.__class__.__name__.lower())
return TemplateResponse(request, [template_name, 'product/details.html'], {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
|
from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
return TemplateResponse(request, 'product/details.html', {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
Allow to use custom templates for products details pages
from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
template_name = 'product/details_%s' % (product.__class__.__name__.lower())
return TemplateResponse(request, [template_name, 'product/details.html'], {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
|
<commit_before>from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
return TemplateResponse(request, 'product/details.html', {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
<commit_msg>Allow to use custom templates for products details pages<commit_after>from __future__ import unicode_literals
from django.http import HttpResponsePermanentRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from .forms import get_form_class_for_product
from .models import Product, Category
def product_details(request, slug, product_id):
product = get_object_or_404(Product, id=product_id)
if product.get_slug() != slug:
return HttpResponsePermanentRedirect(product.get_absolute_url())
form_class = get_form_class_for_product(product)
form = form_class(cart=request.cart, product=product,
data=request.POST or None)
if form.is_valid():
if form.cleaned_data['quantity']:
msg = _('Added %(product)s to your cart.') % {
'product': product}
messages.success(request, msg)
form.save()
return redirect('product:details', slug=slug, product_id=product_id)
template_name = 'product/details_%s' % (product.__class__.__name__.lower())
return TemplateResponse(request, [template_name, 'product/details.html'], {
'product': product, 'form': form})
def category_index(request, slug):
category = get_object_or_404(Category, slug=slug)
products = category.products.all()
return TemplateResponse(request, 'category/index.html', {
'products': products, 'category': category})
|
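A note on the template-fallback pattern used in this commit: Django's TemplateResponse accepts either a single template name or a list of names, and the loader renders the first name in the list that actually resolves, so a per-class template can override the generic one without extra branching. The sketch below only illustrates that behaviour; the Bag class and the template file names are made up for the example and are not taken from the repository.
# Hypothetical Django view showing template-name fallback with TemplateResponse.
# Assumes a configured Django project; model and template names are illustrative.
from django.template.response import TemplateResponse
def product_details(request, product):
    # For a Bag instance this becomes 'product/details_bag.html'.
    specific = 'product/details_%s.html' % product.__class__.__name__.lower()
    # The first existing template in the list wins; if no class-specific
    # template is defined, Django falls back to the generic details page.
    return TemplateResponse(
        request,
        [specific, 'product/details.html'],
        {'product': product},
    )
If no name in the list resolves, Django raises TemplateDoesNotExist at render time, so the generic template at the end of the list acts as the safety net.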
5786942c88420be913705790489676780dcd9fc0
|
nlppln/utils.py
|
nlppln/utils.py
|
"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(fname)
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
|
"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(os.path.abspath(fname))
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
|
Update createdirs to determine the absolute path of files
|
Update createdirs to determine the absolute path of files
|
Python
|
apache-2.0
|
WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln
|
"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(fname)
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
Update createdirs to determine the absolute path of files
|
"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(os.path.abspath(fname))
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
|
<commit_before>"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(fname)
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
<commit_msg>Update createdirs to determine the absolute path of files<commit_after>
|
"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(os.path.abspath(fname))
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
|
"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(fname)
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
Update createdirs to determine the absolute path of files
"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(os.path.abspath(fname))
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
|
<commit_before>"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(fname)
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
<commit_msg>Update createdirs to determine the absolute path of files<commit_after>"""NLP pipeline utility functionality"""
import os
def remove_ext(fname):
"""Removes the extension from a filename
"""
bn = os.path.basename(fname)
return os.path.splitext(bn)[0]
def create_dirs(fname):
"""Create (output) directories if they don't exist
"""
fname = os.path.dirname(os.path.abspath(fname))
if not os.path.exists(fname):
os.makedirs(fname)
def out_file_name(out_dir, fname, ext):
fname = remove_ext(fname)
return os.path.join(out_dir, '{}.{}'.format(fname, ext))
|
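A note on why resolving the absolute path matters in create_dirs: for a bare filename such as 'out.txt', os.path.dirname() returns an empty string, and os.makedirs('') raises an error instead of being a no-op, so taking os.path.abspath() first guarantees that makedirs always receives a real directory. A minimal standard-library sketch of that behaviour (the file names are only examples):
import os
def create_dirs(fname):
    """Create the directory that will hold fname, if it doesn't exist yet."""
    # os.path.dirname('out.txt') == '' and os.makedirs('') fails,
    # so resolve to an absolute path before asking for the directory.
    dirname = os.path.dirname(os.path.abspath(fname))
    if not os.path.exists(dirname):
        os.makedirs(dirname)
create_dirs('out.txt')                # directory is the cwd, nothing to create
create_dirs('results/run1/out.txt')   # creates results/run1 below the cwd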
411117bf057e8835b6c9140b6a86b7ea85c6e80d
|
taskrunner/runners/result.py
|
taskrunner/runners/result.py
|
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
self.stdout_lines = stdout.splitlines() if stdout else []
self.stderr_lines = stderr.splitlines() if stderr else []
|
from ..util import cached_property
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
@cached_property
def stdout_lines(self):
return self.stdout.splitlines() if self.stdout else []
@cached_property
def stderr_lines(self):
return self.stderr.splitlines() if self.stderr else []
|
Make Result.stdout_lines and stderr_lines cached properties
|
Make Result.stdout_lines and stderr_lines cached properties
I guess it probably doesn't matter much for performance, but we might as
well avoid splitting output into lines eagerly since it's typically not
used.
|
Python
|
mit
|
wylee/runcommands,wylee/runcommands
|
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
self.stdout_lines = stdout.splitlines() if stdout else []
self.stderr_lines = stderr.splitlines() if stderr else []
Make Result.stdout_lines and stderr_lines cached properties
I guess it probably doesn't matter much for performance, but we might as
well avoid splitting output into lines eagerly since it's typically not
used.
|
from ..util import cached_property
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
@cached_property
def stdout_lines(self):
return self.stdout.splitlines() if self.stdout else []
@cached_property
def stderr_lines(self):
return self.stderr.splitlines() if self.stderr else []
|
<commit_before>
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
self.stdout_lines = stdout.splitlines() if stdout else []
self.stderr_lines = stderr.splitlines() if stderr else []
<commit_msg>Make Result.stdout_lines and stderr_lines cached properties
I guess it probably doesn't matter much for performance, but we might as
well avoid splitting output into lines eagerly since it's typically not
used.<commit_after>
|
from ..util import cached_property
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
@cached_property
def stdout_lines(self):
return self.stdout.splitlines() if self.stdout else []
@cached_property
def stderr_lines(self):
return self.stderr.splitlines() if self.stderr else []
|
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
self.stdout_lines = stdout.splitlines() if stdout else []
self.stderr_lines = stderr.splitlines() if stderr else []
Make Result.stdout_lines and stderr_lines cached properties
I guess it probably doesn't matter much for performance, but we might as
well avoid splitting output into lines eagerly since it's typically not
used.
from ..util import cached_property
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
@cached_property
def stdout_lines(self):
return self.stdout.splitlines() if self.stdout else []
@cached_property
def stderr_lines(self):
return self.stderr.splitlines() if self.stderr else []
|
<commit_before>
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
self.stdout_lines = stdout.splitlines() if stdout else []
self.stderr_lines = stderr.splitlines() if stderr else []
<commit_msg>Make Result.stdout_lines and stderr_lines cached properties
I guess it probably doesn't matter much for performance, but we might as
well avoid splitting output into lines eagerly since it's typically not
used.<commit_after>from ..util import cached_property
class Result:
def __init__(self, return_code, stdout, stderr):
self.return_code = return_code
self.stdout = stdout
self.stderr = stderr
self.succeeded = self.return_code == 0
self.failed = not self.succeeded
@cached_property
def stdout_lines(self):
return self.stdout.splitlines() if self.stdout else []
@cached_property
def stderr_lines(self):
return self.stderr.splitlines() if self.stderr else []
|
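A note on the cached-property pattern this commit switches to: the output lines are no longer split eagerly in __init__ but on first attribute access, and the result is then stored so the split happens at most once. The ..util.cached_property helper is the project's own; the sketch below assumes it behaves like functools.cached_property, which has been in the standard library since Python 3.8.
import functools
class Result:
    def __init__(self, return_code, stdout, stderr):
        self.return_code = return_code
        self.stdout = stdout
        self.stderr = stderr
    @functools.cached_property
    def stdout_lines(self):
        # Runs on first access only; the list is then cached on the instance.
        return self.stdout.splitlines() if self.stdout else []
result = Result(0, 'line one\nline two', '')
print(result.stdout_lines)  # splitlines() executes here: ['line one', 'line two']
print(result.stdout_lines)  # served from the cache, no second split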
ccd5c79d1a574fa4cc551675ae12d1e47e46c15d
|
chrome/test/chromeos/autotest/files/client/site_tests/desktopui_SyncIntegrationTests/desktopui_SyncIntegrationTests.py
|
chrome/test/chromeos/autotest/files/client/site_tests/desktopui_SyncIntegrationTests/desktopui_SyncIntegrationTests.py
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
|
Make the sync integration tests self-contained on autotest
|
Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
Crystalnix/house-of-life-chromium,adobe/chromium,gavinp/chromium,adobe/chromium,adobe/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,ropik/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,ropik/chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,yitian134/chromium,yitian134/chromium,ropik/chromium,adobe/chromium,adobe/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,ropik/chromium,adobe/chromium,yitian134/chromium,adobe/chromium,yitian134/chromium,adobe/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,ropik/chromium,ropik/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,adobe/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,ropik/chromium
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
|
<commit_before># Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
<commit_msg>Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
|
<commit_before># Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
def run_once(self):
password_file = '%s/sync_password.txt' % self.bindir
self.run_chrome_test('sync_integration_tests',
('--password-file-for-test=%s ' +
'--test-terminate-timeout=300000') % password_file)
<commit_msg>Make the sync integration tests self-contained on autotest
In the past, the sync integration tests used to require a password file
stored on every test device in order to do a gaia sign in using
production gaia servers. This caused the tests to be brittle.
As of today, the sync integration tests no longer rely on a password
file, with gaia sign in being stubbed out locally.
This patch reconfigures the tests on autotest, so that it no longer
looks for a local password file.
In addition, the tests run much faster now, and therefore, we reduce the
max timeout to a more reasonable 2 minutes (in the extreme case).
BUG=chromium-os:11294, chromium-os:9262
TEST=sync_integration_tests
Review URL: http://codereview.chromium.org/6387004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@72561 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from autotest_lib.client.cros import chrome_test
class desktopui_SyncIntegrationTests(chrome_test.ChromeTestBase):
version = 1
binary_to_run = 'sync_integration_tests'
cmd_line_params = '--test-terminate-timeout=120000'
def run_once(self):
self.run_chrome_test(self.binary_to_run, self.cmd_line_params)
|
00781ea2f2f03eb9229a055e65fc73b07122057c
|
ample/testing/__main__.py
|
ample/testing/__main__.py
|
"""main routine for Ample testing"""
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
|
"""main routine for Ample testing"""
import logging
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(level=logging.CRITICAL, formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
|
Change log level to CRITICAL
|
Change log level to CRITICAL
|
Python
|
bsd-3-clause
|
linucks/ample,linucks/ample,rigdenlab/ample,rigdenlab/ample
|
"""main routine for Ample testing"""
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
Change log level to CRITICAL
|
"""main routine for Ample testing"""
import logging
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(level=logging.CRITICAL, formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
|
<commit_before>"""main routine for Ample testing"""
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
<commit_msg>Change log level to CRITICAL<commit_after>
|
"""main routine for Ample testing"""
import logging
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(level=logging.CRITICAL, formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
|
"""main routine for Ample testing"""
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
Change log level to CRITICAL
"""main routine for Ample testing"""
import logging
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(level=logging.CRITICAL, formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
|
<commit_before>"""main routine for Ample testing"""
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
<commit_msg>Change log level to CRITICAL<commit_after>"""main routine for Ample testing"""
import logging
import os
import sys
from ample.util import exit_util, logging_util
from ample.testing import run_tests
logger = logging_util.setup_console_logging(level=logging.CRITICAL, formatstr='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#############################################################################
## Multiprocessing crashes on Windows when running multiple jobs.
# Issue recorded
# 1) https://docs.python.org/2/library/multiprocessing.html#windows
if sys.platform.startswith("win"):
msg = """
*****************************************************************************
A bug prevents you from invoking our testing framework via the module loader.
Please invoke using the following command:
% ccp4-python {0}{1}run_tests.py <command> [<args>]
*****************************************************************************
"""
msg.format(os.path.dirname(__file__), os.sep)
logger.critical(msg)
sys.exit(1)
#############################################################################
## On Unix systems we can run as normal
try:
run_tests.main()
except Exception as e:
msg = "Error running Ample testsuite: {0}".format(e.message)
logger.critical(msg)
exit_util.exit_error(msg, sys.exc_info()[2])
|
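A note on the effect of the level change: passing level=logging.CRITICAL means the console handler only lets CRITICAL records through, so the informational output of the test runner is silenced while fatal errors still reach the user. setup_console_logging is the project's own wrapper; the sketch below shows roughly the same filtering with the standard logging module only.
import logging
import sys
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logger = logging.getLogger('ample.testing')
logger.addHandler(handler)
logger.setLevel(logging.CRITICAL)
logger.info('running test suite...')               # below CRITICAL, suppressed
logger.critical('Error running Ample testsuite')   # still printed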
7deedcc1a4762b8156d8c52c42a90f94f501e6f1
|
tests/integration/players.py
|
tests/integration/players.py
|
#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'http://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
|
#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
|
Make integration test work again with usage of SSL
|
Make integration test work again with usage of SSL
|
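A note on the switch from http to https in this test helper: urllib.request.urlopen verifies the server certificate by default, so if the local test server at https://localhost:3000 uses a self-signed certificate the request fails unless an ssl.SSLContext is supplied. This is not part of the commit, just a hedged illustration of how such a context could be passed for a local-only test:
import ssl
import urllib.request
# For a throwaway local test server with a self-signed certificate only;
# never disable verification against real endpoints.
context = ssl.create_default_context()
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
req = urllib.request.Request('https://localhost:3000/players',
                             data=b'username=chapmang')
response = urllib.request.urlopen(req, context=context)
print(response.read())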
Python
|
mit
|
dropshot/dropshot-server
|
#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'http://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
Make integration test work again with usage of SSL
|
#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
|
<commit_before>#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'http://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
<commit_msg>Make integration test work again with usage of SSL<commit_after>
|
#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
|
#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'http://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
Make integration test work again with usage of SSL
#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
|
<commit_before>#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'http://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
<commit_msg>Make integration test work again with usage of SSL<commit_after>#!/usr/bin/env python
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "chapmang@dropshot.com")
create_player("idlee", "deadparrot", "idlee@dropshot.com")
create_player("gilliamt", "lumberjack", "gilliamt@dropshot.com")
create_player("jonest", "trojanrabbit", "jonest@dropshot.com")
create_player("cleesej", "generaldirection", "cleesej@dropshot.com")
create_player("palinm", "fleshwound", "palinm@dropshot.com")
|
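An aside on the dropshot-server record above: the fix only switches the URL scheme to https. If the local server at localhost:3000 presents a self-signed certificate (an assumption, not stated in the record), default urlopen() verification would reject it; the following is a minimal, illustrative sketch of a test-only workaround, not part of the actual commit.
#!/usr/bin/env python
# Illustrative sketch only -- not part of the dropshot-server commit.
# Assumes the integration server at https://localhost:3000 presents a
# self-signed certificate that default certificate verification rejects.
import ssl
import urllib.parse
import urllib.request
def create_player_insecure(username, password, email):
    url = 'https://localhost:3000/players'
    data = urllib.parse.urlencode({'username': username,
                                   'password': password,
                                   'email': email}).encode('utf-8')
    # Test-only context: disable hostname checking and certificate
    # verification so the client accepts the self-signed certificate.
    context = ssl.create_default_context()
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    response = urllib.request.urlopen(urllib.request.Request(url, data),
                                      context=context)
    return response.read()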
1863c2cd57d2d09a8655ed15b741b1a038355321
|
tests/util/get_passed_env.py
|
tests/util/get_passed_env.py
|
import os
from bonobo import Graph
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = Graph(extract, load)
if __name__ == '__main__':
from bonobo import run
run(graph)
|
import os
import bonobo
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = bonobo.Graph(extract, load)
if __name__ == '__main__':
bonobo.run(graph)
|
Change import style in example.
|
Change import style in example.
|
Python
|
apache-2.0
|
python-bonobo/bonobo,hartym/bonobo,hartym/bonobo,python-bonobo/bonobo,hartym/bonobo,python-bonobo/bonobo
|
import os
from bonobo import Graph
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = Graph(extract, load)
if __name__ == '__main__':
from bonobo import run
run(graph)
Change import style in example.
|
import os
import bonobo
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = bonobo.Graph(extract, load)
if __name__ == '__main__':
bonobo.run(graph)
|
<commit_before>import os
from bonobo import Graph
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = Graph(extract, load)
if __name__ == '__main__':
from bonobo import run
run(graph)
<commit_msg>Change import style in example.<commit_after>
|
import os
import bonobo
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = bonobo.Graph(extract, load)
if __name__ == '__main__':
bonobo.run(graph)
|
import os
from bonobo import Graph
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = Graph(extract, load)
if __name__ == '__main__':
from bonobo import run
run(graph)
Change import style in example.
import os
import bonobo
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = bonobo.Graph(extract, load)
if __name__ == '__main__':
bonobo.run(graph)
|
<commit_before>import os
from bonobo import Graph
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = Graph(extract, load)
if __name__ == '__main__':
from bonobo import run
run(graph)
<commit_msg>Change import style in example.<commit_after>import os
import bonobo
def extract():
env_test_user = os.getenv('ENV_TEST_USER')
env_test_number = os.getenv('ENV_TEST_NUMBER')
env_test_string = os.getenv('ENV_TEST_STRING')
return env_test_user, env_test_number, env_test_string
def load(s: str):
print(s)
graph = bonobo.Graph(extract, load)
if __name__ == '__main__':
bonobo.run(graph)
|
b2a2cd52d221b377af57b649d66c0df5e44b7712
|
src/mist/io/tests/features/environment.py
|
src/mist/io/tests/features/environment.py
|
import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)
|
import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
try:
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
except:
pass
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)
|
Fix auto importing test_config if it is not there
|
Fix auto importing test_config if it is not there
|
Python
|
agpl-3.0
|
DimensionDataCBUSydney/mist.io,kelonye/mist.io,Lao-liu/mist.io,munkiat/mist.io,zBMNForks/mist.io,johnnyWalnut/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,johnnyWalnut/mist.io,munkiat/mist.io,zBMNForks/mist.io,kelonye/mist.io,afivos/mist.io,afivos/mist.io,munkiat/mist.io,munkiat/mist.io,johnnyWalnut/mist.io,kelonye/mist.io,zBMNForks/mist.io,DimensionDataCBUSydney/mist.io,DimensionDataCBUSydney/mist.io,Lao-liu/mist.io
|
import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)
Fix auto importing test_config if it is not there
|
import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
try:
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
except:
pass
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)
|
<commit_before>import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)<commit_msg>Fix auto importing test_config if it is not there<commit_after>
|
import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
try:
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
except:
pass
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)
|
import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)
Fix auto importing test_config if it is not there
import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
try:
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
except:
pass
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)
|
<commit_before>import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)<commit_msg>Fix auto importing test_config if it is not there<commit_after>import os
import random
from shutil import copyfile
from behaving import environment as benv
from behaving.web.steps import *
from behaving.personas.steps import *
try:
from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
PERSONAS = {
'NinjaTester': dict(
creds=CREDENTIALS,
mistcreds=MISTCREDS,
machine=TESTNAMES['machine_name']+str(random.randint(1, 10000)),
image_machine=TESTNAMES['image_machine']+str(random.randint(1, 10000)),
key_name=TESTNAMES['key']+str(random.randint(1, 10000))
)
}
except:
pass
def before_all(context):
benv.before_all(context)
context.personas = PERSONAS
try:
copyfile("db.yaml", "db.yaml.test_backup")
os.remove("db.yaml")
print "Removing file"
except:
pass
def after_all(context):
try:
copyfile("db.yaml.test_backup", "db.yaml")
os.remove("db.yaml.test_backup")
except:
pass
def before_feature(context, feature):
benv.before_feature(context, feature)
def before_scenario(context, scenario):
benv.before_scenario(context, scenario)
context.personas = PERSONAS
def after_scenario(context, scenario):
benv.after_scenario(context, scenario)
|
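A note on the mist.io record above, offered as an illustration rather than a correction of the actual commit: the bare except silences the missing-module error but leaves PERSONAS undefined, so before_all() would still raise a NameError when test_config is absent. Below is a minimal sketch that catches ImportError specifically and always defines PERSONAS; the empty-dict fallback is an assumption about the desired behaviour.
import random
try:
    from test_config import CREDENTIALS, MISTCREDS, TESTNAMES
except ImportError:
    # Optional config module is absent; fall back to empty values so the
    # names below are always defined.
    CREDENTIALS = MISTCREDS = None
    TESTNAMES = {}
if TESTNAMES:
    PERSONAS = {
        'NinjaTester': dict(
            creds=CREDENTIALS,
            mistcreds=MISTCREDS,
            machine=TESTNAMES['machine_name'] + str(random.randint(1, 10000)),
            image_machine=TESTNAMES['image_machine'] + str(random.randint(1, 10000)),
            key_name=TESTNAMES['key'] + str(random.randint(1, 10000)),
        )
    }
else:
    PERSONAS = {}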
0ea397c635983ed23dd3b8df8cbeb4d64f35404e
|
lib/event.py
|
lib/event.py
|
# For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, hostname=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['hostname'] = hostname
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
|
# For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, host=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['host'] = host
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
|
Fix fieldname mapping for riemann (should be host, not hostname)
|
Fix fieldname mapping for riemann (should be host, not hostname)
|
Python
|
mit
|
crashlytics/riemann-sumd
|
# For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, hostname=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['hostname'] = hostname
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
Fix fieldname mapping for riemann (should be host, not hostname)
|
# For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, host=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['host'] = host
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
|
<commit_before># For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, hostname=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['hostname'] = hostname
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
<commit_msg>Fix fieldname mapping for riemann (should be host, not hostname)<commit_after>
|
# For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, host=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['host'] = host
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
|
# For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, hostname=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['hostname'] = hostname
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
Fix fieldname mapping for riemann (should be host, not hostname)
# For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, host=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['host'] = host
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
|
<commit_before># For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, hostname=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['hostname'] = hostname
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
<commit_msg>Fix fieldname mapping for riemann (should be host, not hostname)<commit_after># For determining hostname
import socket
import logging
log = logging.getLogger(__name__)
class Events():
def __init__(self):
self.events = []
def add(self, service, state, description, ttl, tags=None, metric=None, host=socket.gethostname()):
event = {}
event['service'] = service
event['state'] = state
event['description'] = description
event['ttl'] = ttl
event['host'] = host
if tags is not None:
event['tags'] = tags
if metric is not None:
event['metric'] = metric
self.events.append(event)
log.debug("Event added: %s" % (event))
def send(self, client):
log.debug("Sending %s events..." % (len(self.events)))
while len(self.events) > 0:
event = self.events.pop(0)
try:
client.send(event)
except socket.error:
log.error("Unable to send event '%s' to %s:%s" % (event['service'], client.host, client.port))
|
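One behavioural detail of the riemann-sumd record above: host=socket.gethostname() is a default argument, so it is evaluated once at import time like any Python default. If the hostname can change while the agent runs, a call-time lookup avoids stale values; the sketch below is illustrative only and not taken from the project.
import socket
def make_event(service, state, description, ttl, tags=None, metric=None, host=None):
    # Resolve the hostname at call time instead of binding it once when
    # the module is imported.
    event = {
        'service': service,
        'state': state,
        'description': description,
        'ttl': ttl,
        'host': host if host is not None else socket.gethostname(),
    }
    if tags is not None:
        event['tags'] = tags
    if metric is not None:
        event['metric'] = metric
    return event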
32b8a5d9f18a673c621c8543848f5163dfe24393
|
cobs2/__init__.py
|
cobs2/__init__.py
|
"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A C extension implementation only is provided.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
from cobs._cobsext import *
_using_extension = True
|
"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A pure Python implementation and a C extension implementation
are provided. If the C extension is not available for some reason,
the pure Python version will be used.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
try:
from cobs._cobsext import *
_using_extension = True
except ImportError:
from cobs._cobspy import *
_using_extension = False
|
Change Python 2.x import to fall-back to pure Python code if it can't load the C extension.
|
Change Python 2.x import to fall-back to pure Python code if it can't load the C extension.
|
Python
|
mit
|
cmcqueen/cobs-python,cmcqueen/cobs-python
|
"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A C extension implementation only is provided.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
from cobs._cobsext import *
_using_extension = True
Change Python 2.x import to fall-back to pure Python code if it can't load the C extension.
|
"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A pure Python implementation and a C extension implementation
are provided. If the C extension is not available for some reason,
the pure Python version will be used.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
try:
from cobs._cobsext import *
_using_extension = True
except ImportError:
from cobs._cobspy import *
_using_extension = False
|
<commit_before>"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A C extension implementation only is provided.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
from cobs._cobsext import *
_using_extension = True
<commit_msg>Change Python 2.x import to fall-back to pure Python code if it can't load the C extension.<commit_after>
|
"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A pure Python implementation and a C extension implementation
are provided. If the C extension is not available for some reason,
the pure Python version will be used.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
try:
from cobs._cobsext import *
_using_extension = True
except ImportError:
from cobs._cobspy import *
_using_extension = False
|
"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A C extension implementation only is provided.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
from cobs._cobsext import *
_using_extension = True
Change Python 2.x import to fall-back to pure Python code if it can't load the C extension.
"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A pure Python implementation and a C extension implementation
are provided. If the C extension is not available for some reason,
the pure Python version will be used.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
try:
from cobs._cobsext import *
_using_extension = True
except ImportError:
from cobs._cobspy import *
_using_extension = False
|
<commit_before>"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A C extension implementation only is provided.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
from cobs._cobsext import *
_using_extension = True
<commit_msg>Change Python 2.x import to fall-back to pure Python code if it can't load the C extension.<commit_after>"""
Consistent Overhead Byte Stuffing (COBS) encoding and decoding.
Functions are provided for encoding and decoding according to
the basic COBS method.
The COBS variant "Zero Pair Elimination" (ZPE) is not
implemented.
A pure Python implementation and a C extension implementation
are provided. If the C extension is not available for some reason,
the pure Python version will be used.
References:
http://www.stuartcheshire.org/papers/COBSforSIGCOMM/
http://www.stuartcheshire.org/papers/COBSforToN.pdf
"""
try:
from cobs._cobsext import *
_using_extension = True
except ImportError:
from cobs._cobspy import *
_using_extension = False
|
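A hypothetical usage sketch for the fallback import in the cobs record above. It assumes the package is importable as cobs and that the star import (from either the C extension or the pure Python module) exposes encode() and decode(); neither name is confirmed by the record itself.
import cobs  # assumed import path; the record's file lives in cobs2/__init__.py
payload = b'\x00hello\x00world'
framed = cobs.encode(payload)          # COBS output contains no zero bytes
assert b'\x00' not in framed
assert cobs.decode(framed) == payload  # round-trips back to the original
# _using_extension records whether the C extension or the pure Python
# fallback was actually loaded.
print('C extension in use:', cobs._using_extension)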
326e7ba1378b691ad6323c2559686f0c4d97b45f
|
flowgen/core.py
|
flowgen/core.py
|
# -*- coding: utf-8 -*-
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
Update py27 compatibility in print function
|
Update py27 compatibility in print function
|
Python
|
mit
|
ad-m/flowgen
|
# -*- coding: utf-8 -*-
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
Update py27 compatibility in print function
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
<commit_before># -*- coding: utf-8 -*-
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
<commit_msg>Update py27 compatibility in print function<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
# -*- coding: utf-8 -*-
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
Update py27 compatibility in print function
# -*- coding: utf-8 -*-
from __future__ import print_function
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
<commit_before># -*- coding: utf-8 -*-
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
<commit_msg>Update py27 compatibility in print function<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function
from flowgen.graph import Graph
from flowgen.language import Code
from flowgen.options import parser
from pypeg2 import parse
from pypeg2.xmlast import thing2xml
class FlowGen(object):
def __init__(self, args):
self.args = parser.parse_args(args)
def any_output(self):
return any([self.args.dump_source, self.args.dump_xml])
def safe_print(self, *args, **kwargs):
if not self.any_output():
print(*args, **kwargs)
def run(self):
data_input = self.args.infile.read()
tree = parse(data_input, Code)
if self.args.dump_xml:
print(thing2xml(tree, pretty=True).decode())
graph = Graph(tree)
if self.args.dump_source:
print(graph.get_source())
if self.args.preview:
graph.dot.view()
if self.args.outfile:
graph.save(self.args.outfile.name)
self.safe_print("Saved graph to %s successfull" % (self.args.outfile.name))
|
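For context on the flowgen record above: safe_print() forwards *args and **kwargs to print(), which only parses when print is a function, hence the single added __future__ line. A minimal standalone illustration (not from flowgen):
from __future__ import print_function  # no-op on Python 3
def safe_print(*args, **kwargs):
    # Without the future import, Python 2 treats print as a statement and
    # the call below is a SyntaxError; with it, print is an ordinary function.
    print(*args, **kwargs)
safe_print('Saved graph to', 'out.png', end='\n')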
2c56045f95f8efd8ff52e5151b24cfaa275660e8
|
forum/models.py
|
forum/models.py
|
from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
|
from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
class PostRevision(models.Model):
post = models.ForeignKey(Post)
author = models.ForeignKey(User)
date_created = models.DateTimeField()
text = models.TextField()
|
Create model for post revisions.
|
Create model for post revisions.
|
Python
|
mit
|
xfix/NextBoard
|
from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
Create model for post revisions.
|
from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
class PostRevision(models.Model):
post = models.ForeignKey(Post)
author = models.ForeignKey(User)
date_created = models.DateTimeField()
text = models.TextField()
|
<commit_before>from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
<commit_msg>Create model for post revisions.<commit_after>
|
from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
class PostRevision(models.Model):
post = models.ForeignKey(Post)
author = models.ForeignKey(User)
date_created = models.DateTimeField()
text = models.TextField()
|
from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
Create model for post revisions.
from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
class PostRevision(models.Model):
post = models.ForeignKey(Post)
author = models.ForeignKey(User)
date_created = models.DateTimeField()
text = models.TextField()
|
<commit_before>from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
<commit_msg>Create model for post revisions.<commit_after>from django.db import models
import django.contrib.auth.models as auth
class User(auth.User):
"""Model for representing users.
It has few fields that aren't in the standard authentication user
table, and are needed for the forum to work, like footers.
"""
display_name = models.CharField(max_length=30, null=True)
footer = models.TextField(null=True)
class Thread(models.Model):
"""Model for representing threads."""
title = models.CharField(max_length=100)
views = models.PositiveIntegerField(default=0)
sticky = models.BooleanField()
closed = models.BooleanField()
class Post(models.Model):
thread = models.ForeignKey(Thread)
class PostRevision(models.Model):
post = models.ForeignKey(Post)
author = models.ForeignKey(User)
date_created = models.DateTimeField()
text = models.TextField()
|
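A hypothetical sketch of how the PostRevision model introduced in the NextBoard record above might be used. The helper names, the forum.models import path, and the default postrevision_set reverse accessor are assumptions for illustration, not part of the commit.
from django.utils import timezone
from forum.models import PostRevision
def latest_text(post):
    # Newest revision first; returns None if the post has no revisions yet.
    revision = post.postrevision_set.order_by('-date_created').first()
    return revision.text if revision else None
def add_revision(post, author, text):
    # Stamp the revision with the creation time explicitly, since the model
    # does not declare auto_now_add on date_created.
    return PostRevision.objects.create(
        post=post, author=author, date_created=timezone.now(), text=text)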
cc8fab220bfeb94c94682912fbb63f4755dfa64f
|
oa_pmc.py
|
oa_pmc.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from time import sleep
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
# TODO: remove sleep once europe pmc pages are backfilled
# (pmh_version_first_available is populated for records from endpoint b5e840539009389b1a6)
sleep(3)
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
|
Revert "apply caveman rate limiting to europe pmc api calls"
|
Revert "apply caveman rate limiting to europe pmc api calls"
This reverts commit f2d0b4ab961325dfffaf3a54603b56e848f46da0.
|
Python
|
mit
|
Impactstory/sherlockoa,Impactstory/sherlockoa
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from time import sleep
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
# TODO: remove sleep once europe pmc pages are backfilled
# (pmh_version_first_available is populated for records from endpoint b5e840539009389b1a6)
sleep(3)
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
Revert "apply caveman rate limiting to europe pmc api calls"
This reverts commit f2d0b4ab961325dfffaf3a54603b56e848f46da0.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from time import sleep
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
# TODO: remove sleep once europe pmc pages are backfilled
# (pmh_version_first_available is populated for records from endpoint b5e840539009389b1a6)
sleep(3)
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
<commit_msg>Revert "apply caveman rate limiting to europe pmc api calls"
This reverts commit f2d0b4ab961325dfffaf3a54603b56e848f46da0.<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from time import sleep
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
# TODO: remove sleep once europe pmc pages are backfilled
# (pmh_version_first_available is populated for records from endpoint b5e840539009389b1a6)
sleep(3)
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
Revert "apply caveman rate limiting to europe pmc api calls"
This reverts commit f2d0b4ab961325dfffaf3a54603b56e848f46da0.#!/usr/bin/python
# -*- coding: utf-8 -*-
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from time import sleep
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
# TODO: remove sleep once europe pmc pages are backfilled
# (pmh_version_first_available is populated for records from endpoint b5e840539009389b1a6)
sleep(3)
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
<commit_msg>Revert "apply caveman rate limiting to europe pmc api calls"
This reverts commit f2d0b4ab961325dfffaf3a54603b56e848f46da0.<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from cachetools import LRUCache
from kids.cache import cache
from http_cache import http_get
# examples
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3039489&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=PMC3606428&resulttype=core&format=json&tool=oadoi
# https://www.ebi.ac.uk/europepmc/webservices/rest/search?query=10.1093/jisesa/iex068&resulttype=core&format=json&tool=oadoi
@cache(use=LRUCache(maxsize=32))
def query_pmc(query_text):
if not query_text:
return None
url_template = u"https://www.ebi.ac.uk/europepmc/webservices/rest/search?query={}&resulttype=core&format=json&tool=oadoi"
url = url_template.format(query_text)
r = http_get(url)
data = r.json()
result_list = data["resultList"]["result"]
return result_list
|
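The revert above leaves query_pmc relying on its LRU memoization rather than a fixed sleep(3) between calls. As a minimal standard-library sketch of that idea — cache identical lookups so the remote endpoint is queried only once per distinct search — the following is illustrative only; the fetch_pmc name and the call counter are invented for the example, and the real code issues its HTTP GET through http_get.
from functools import lru_cache
CALLS = {"network": 0}  # crude counter so the sketch can show the cache at work
@lru_cache(maxsize=32)
def fetch_pmc(query_text):
    """Stand-in for the Europe PMC search call; returns a fake result list."""
    if not query_text:
        return None
    CALLS["network"] += 1  # a real implementation would issue the HTTP GET here
    return [{"query": query_text, "source": "europepmc"}]
if __name__ == "__main__":
    fetch_pmc("PMC3039489")
    fetch_pmc("PMC3039489")   # identical query: served from the cache
    fetch_pmc("PMC3606428")
    print(CALLS["network"])   # -> 2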
524626ee33e6139ded9e5f5fdbc108a7775911cf
|
scoring_engine/models/team.py
|
scoring_engine/models/team.py
|
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
|
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def __init__(self, name, color):
self.name = name
self.color = color
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
|
Add default init for Team Class
|
Add default init for Team Class
|
Python
|
mit
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
Add default init for Team Class
|
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def __init__(self, name, color):
self.name = name
self.color = color
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
|
<commit_before>from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
<commit_msg>Add default init for Team Class<commit_after>
|
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def __init__(self, name, color):
self.name = name
self.color = color
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
|
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
Add default init for Team Classfrom sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def __init__(self, name, color):
self.name = name
self.color = color
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
|
<commit_before>from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
<commit_msg>Add default init for Team Class<commit_after>from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import relationship
from scoring_engine.models.base import Base
class Team(Base):
__tablename__ = 'teams'
id = Column(Integer, primary_key=True)
name = Column(String(20), nullable=False)
color = Column(String(10), nullable=False)
services = relationship("Service", back_populates="team")
users = relationship("User", back_populates="team")
def __init__(self, name, color):
self.name = name
self.color = color
def current_score(self):
# todo make this dynamic based on service result
return 2000
@property
def is_red_team(self):
return self.color == 'Red'
@property
def is_white_team(self):
return self.color == 'White'
@property
def is_blue_team(self):
return self.color == 'Blue'
|
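The commit above adds an explicit __init__(name, color) so Team instances can be created positionally before being attached to a session. The snippet below is a plain-Python stand-in that mirrors only the constructor and the colour properties — the SQLAlchemy columns and relationships are deliberately omitted, and the class name is invented — to show the intended call pattern.
class TeamSketch:
    """Stand-in for the ORM model; no SQLAlchemy wiring, illustration only."""
    def __init__(self, name, color):
        self.name = name
        self.color = color
    @property
    def is_red_team(self):
        return self.color == 'Red'
    @property
    def is_blue_team(self):
        return self.color == 'Blue'
if __name__ == "__main__":
    blue = TeamSketch('Defenders', 'Blue')      # positional construction enabled by __init__
    print(blue.is_blue_team, blue.is_red_team)  # -> True False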
3355006202321c9fcace1753486119d5e6177e06
|
weaveserver/services/core/service.py
|
weaveserver/services/core/service.py
|
from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(self, config.get("PORT") or PORT,
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
|
from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(int(config.get("PORT") or PORT),
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
|
Convert config["PORT"] to int before using.
|
Convert config["PORT"] to int before using.
|
Python
|
mit
|
supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer
|
from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(self, config.get("PORT") or PORT,
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
Convert config["PORT"] to int before using.
|
from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(int(config.get("PORT") or PORT),
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
|
<commit_before>from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(self, config.get("PORT") or PORT,
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
<commit_msg>Convert config["PORT"] to int before using.<commit_after>
|
from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(int(config.get("PORT") or PORT),
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
|
from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(self, config.get("PORT") or PORT,
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
Convert config["PORT"] to int before using.from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(int(config.get("PORT") or PORT),
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
|
<commit_before>from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(self, config.get("PORT") or PORT,
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
<commit_msg>Convert config["PORT"] to int before using.<commit_after>from threading import Thread, Event
from weavelib.services import BaseService, BackgroundThreadServiceStart
from .server import MessageServer
from .appmgr import ApplicationRegistry
PORT = 11023
class CoreService(BackgroundThreadServiceStart, BaseService):
def __init__(self, token, config):
super(CoreService, self).__init__(token)
config = config["core_config"]
self.apps_auth = {
token: {"type": "SYSTEM", "appid": token}
}
self.message_server = MessageServer(int(config.get("PORT") or PORT),
self.apps_auth,
self.message_server_started)
self.message_server_thread = Thread(target=self.message_server.run)
self.registry = ApplicationRegistry(self)
self.shutdown_event = Event()
def message_server_started(self):
self.registry.start()
def before_service_start(self):
"""Need to override to prevent rpc_client connecting."""
def on_service_start(self, *args, **kwargs):
self.message_server_thread.start()
# self.registry will be started when message_server_started is called.
self.shutdown_event.wait()
def on_service_stop(self):
self.registry.stop()
self.message_server.shutdown()
self.message_server_thread.join()
self.shutdown_event.set()
|
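The change above casts the configured port to int because values loaded from JSON/YAML files or environment variables usually arrive as strings, while socket and server APIs expect integers. A small sketch of the same pattern, with the helper name and sample values invented for illustration:
DEFAULT_PORT = 11023
def resolve_port(config, default=DEFAULT_PORT):
    """Return an integer port from a config dict that may hold strings or be empty."""
    raw = config.get("PORT") or default  # covers a missing key, None and ""
    return int(raw)
if __name__ == "__main__":
    print(resolve_port({"PORT": "8080"}))  # -> 8080 as an int
    print(resolve_port({}))                # -> 11023
    print(resolve_port({"PORT": ""}))      # -> 11023, empty string is falsy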
e9198579b3ff10c78df816fec204e36e502233d5
|
tests/test_channel_handler.py
|
tests/test_channel_handler.py
|
import json
from tornado import testing, httpserver
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash=hmac-key"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
|
import json
from tornado import testing
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
key_hash = 'hmac-key'
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash={key_hash}"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
channel = Channels.get(channel_id)
self.assertEqual(salt, channel.salt)
self.assertEqual(key_hash, key_hash)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
|
Make sure that the channel creation and storage works
|
Make sure that the channel creation and storage works
Signed-off-by: Rohan Jain <f3a935f2cb7c3d75d1446a19169b923809d6e623@gmail.com>
|
Python
|
agpl-3.0
|
rmoorman/qotr,rmoorman/qotr,crodjer/qotr,sbuss/qotr,sbuss/qotr,rmoorman/qotr,crodjer/qotr,curtiszimmerman/qotr,crodjer/qotr,curtiszimmerman/qotr,rmoorman/qotr,sbuss/qotr,curtiszimmerman/qotr,crodjer/qotr,curtiszimmerman/qotr,sbuss/qotr
|
import json
from tornado import testing, httpserver
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash=hmac-key"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
Make sure that the channel creation and storage works
Signed-off-by: Rohan Jain <f3a935f2cb7c3d75d1446a19169b923809d6e623@gmail.com>
|
import json
from tornado import testing
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
key_hash = 'hmac-key'
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash={key_hash}"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
channel = Channels.get(channel_id)
self.assertEqual(salt, channel.salt)
self.assertEqual(key_hash, key_hash)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
|
<commit_before>import json
from tornado import testing, httpserver
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash=hmac-key"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
<commit_msg>Make sure that the channel creation and storage works
Signed-off-by: Rohan Jain <f3a935f2cb7c3d75d1446a19169b923809d6e623@gmail.com><commit_after>
|
import json
from tornado import testing
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
key_hash = 'hmac-key'
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash={key_hash}"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
channel = Channels.get(channel_id)
self.assertEqual(salt, channel.salt)
self.assertEqual(key_hash, key_hash)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
|
import json
from tornado import testing, httpserver
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash=hmac-key"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
Make sure that the channel creation and storage works
Signed-off-by: Rohan Jain <f3a935f2cb7c3d75d1446a19169b923809d6e623@gmail.com>import json
from tornado import testing
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
key_hash = 'hmac-key'
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash={key_hash}"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
channel = Channels.get(channel_id)
self.assertEqual(salt, channel.salt)
self.assertEqual(key_hash, key_hash)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
|
<commit_before>import json
from tornado import testing, httpserver
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash=hmac-key"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
<commit_msg>Make sure that the channel creation and storage works
Signed-off-by: Rohan Jain <f3a935f2cb7c3d75d1446a19169b923809d6e623@gmail.com><commit_after>import json
from tornado import testing
from qotr.server import make_application
from qotr.channels import Channels
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
Channels.reset()
return make_application()
def test_create(self):
salt = "common"
channel_id = "test-channel"
key_hash = 'hmac-key'
body = "&".join([
"id={channel_id}",
"salt={salt}",
"key_hash={key_hash}"
]).format(**locals())
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertEqual({
"salt": salt,
"id": channel_id
}, response)
channel = Channels.get(channel_id)
self.assertEqual(salt, channel.salt)
self.assertEqual(key_hash, key_hash)
def test_confict(self):
body = "&".join([
"id=common",
"salt=test-channel",
"key_hash=hmac-key"
])
self.fetch('/c/new', method='POST', body=body)
response = json.loads(self.fetch(
'/c/new', method='POST',
body=body
).body.decode('utf8'))
self.assertTrue("error" in response)
|
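The test above assembles its form-encoded POST body by joining key=value pairs manually. One hedged alternative — not part of the repository — is to let urllib.parse.urlencode build the body, which also percent-escapes any unsafe characters in the salt or key hash:
from urllib.parse import urlencode
def channel_body(channel_id, salt, key_hash):
    """Build an application/x-www-form-urlencoded body for the /c/new handler."""
    return urlencode({"id": channel_id, "salt": salt, "key_hash": key_hash})
if __name__ == "__main__":
    print(channel_body("test-channel", "common", "hmac-key"))
    # -> id=test-channel&salt=common&key_hash=hmac-key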
b7d63d1aab571eeab3dfc26674039b481398f7e5
|
sidecar/themes/light/__init__.py
|
sidecar/themes/light/__init__.py
|
from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
class LightTheme(Theme):
key = 'light'
require_config_path = '/_light/js/require_config.js'
require_base_url = '/_light/js/vendor/'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': ('/_light/css/main.less', 'less'),
'main-js': ('/_light/js/main.js', 'requirejs'),
}
|
from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
from pyramid_frontend.assets.less import LessAsset
from pyramid_frontend.assets.requirejs import RequireJSAsset
class LightTheme(Theme):
key = 'light'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': LessAsset('/_light/css/main.less'),
'main-js': RequireJSAsset(
'/_light/js/main.js',
require_config_path='/_light/js/require_config.js',
require_base_url='/_light/js/vendor/',
),
}
|
Update theme configuration to latest pyramid_frontend API
|
Update theme configuration to latest pyramid_frontend API
|
Python
|
mit
|
storborg/sidecar,storborg/sidecar
|
from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
class LightTheme(Theme):
key = 'light'
require_config_path = '/_light/js/require_config.js'
require_base_url = '/_light/js/vendor/'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': ('/_light/css/main.less', 'less'),
'main-js': ('/_light/js/main.js', 'requirejs'),
}
Update theme configuration to latest pyramid_frontend API
|
from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
from pyramid_frontend.assets.less import LessAsset
from pyramid_frontend.assets.requirejs import RequireJSAsset
class LightTheme(Theme):
key = 'light'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': LessAsset('/_light/css/main.less'),
'main-js': RequireJSAsset(
'/_light/js/main.js',
require_config_path='/_light/js/require_config.js',
require_base_url='/_light/js/vendor/',
),
}
|
<commit_before>from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
class LightTheme(Theme):
key = 'light'
require_config_path = '/_light/js/require_config.js'
require_base_url = '/_light/js/vendor/'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': ('/_light/css/main.less', 'less'),
'main-js': ('/_light/js/main.js', 'requirejs'),
}
<commit_msg>Update theme configuration to latest pyramid_frontend API<commit_after>
|
from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
from pyramid_frontend.assets.less import LessAsset
from pyramid_frontend.assets.requirejs import RequireJSAsset
class LightTheme(Theme):
key = 'light'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': LessAsset('/_light/css/main.less'),
'main-js': RequireJSAsset(
'/_light/js/main.js',
require_config_path='/_light/js/require_config.js',
require_base_url='/_light/js/vendor/',
),
}
|
from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
class LightTheme(Theme):
key = 'light'
require_config_path = '/_light/js/require_config.js'
require_base_url = '/_light/js/vendor/'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': ('/_light/css/main.less', 'less'),
'main-js': ('/_light/js/main.js', 'requirejs'),
}
Update theme configuration to latest pyramid_frontend APIfrom pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
from pyramid_frontend.assets.less import LessAsset
from pyramid_frontend.assets.requirejs import RequireJSAsset
class LightTheme(Theme):
key = 'light'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': LessAsset('/_light/css/main.less'),
'main-js': RequireJSAsset(
'/_light/js/main.js',
require_config_path='/_light/js/require_config.js',
require_base_url='/_light/js/vendor/',
),
}
|
<commit_before>from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
class LightTheme(Theme):
key = 'light'
require_config_path = '/_light/js/require_config.js'
require_base_url = '/_light/js/vendor/'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': ('/_light/css/main.less', 'less'),
'main-js': ('/_light/js/main.js', 'requirejs'),
}
<commit_msg>Update theme configuration to latest pyramid_frontend API<commit_after>from pyramid_frontend.theme import Theme
from pyramid_frontend.images import FilterChain
from pyramid_frontend.assets.less import LessAsset
from pyramid_frontend.assets.requirejs import RequireJSAsset
class LightTheme(Theme):
key = 'light'
image_filters = (
FilterChain(
'thumb', width=330, height=220, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'square', width=300, height=300, extension='jpg',
crop=True, quality=80, sharpness=1.5),
FilterChain(
'about', width=400, height=300, extension='jpg',
quality=80, sharpness=1.5),
FilterChain(
'large', width=800, height=600, extension='jpg', resize=True,
quality=85, sharpness=1.5),
FilterChain(
'huge', width=2400, height=500, extension='jpg', quality=90,
sharpness=1.5),
)
assets = {
'main-less': LessAsset('/_light/css/main.less'),
'main-js': RequireJSAsset(
'/_light/js/main.js',
require_config_path='/_light/js/require_config.js',
require_base_url='/_light/js/vendor/',
),
}
|
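The migration above swaps ('path', 'kind') tuples for dedicated LessAsset and RequireJSAsset objects so compiler-specific options live on the asset itself. The library-free sketch below illustrates why typed asset objects read better than bare tuples; the dataclass names are invented and are not the pyramid_frontend API.
from dataclasses import dataclass, field
@dataclass
class StyleAsset:
    url_path: str
@dataclass
class ScriptAsset:
    url_path: str
    options: dict = field(default_factory=dict)  # e.g. require_config_path, base URL
ASSETS = {
    'main-less': StyleAsset('/_light/css/main.less'),
    'main-js': ScriptAsset('/_light/js/main.js',
                           {'require_config_path': '/_light/js/require_config.js'}),
}
if __name__ == "__main__":
    # Each entry carries its own settings instead of relying on tuple positions.
    print(ASSETS['main-js'].options['require_config_path'])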
cbb1384757e54252a9a333828e535ca5596eaca1
|
wafer/schedule/admin.py
|
wafer/schedule/admin.py
|
from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
|
from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
list_display = ('__unicode__', 'end_time')
list_editable = ('end_time',)
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
|
Add end_time as editable on the slot list view to make tweaking times easier
|
Add end_time as editable on the slot list view to make tweaking times easier
|
Python
|
isc
|
CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer
|
from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
Add end_time as editable on the slot list view to make tweaking times easier
|
from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
list_display = ('__unicode__', 'end_time')
list_editable = ('end_time',)
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
|
<commit_before>from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
<commit_msg>Add end_time as editable on the slot list view to make tweaking times easier<commit_after>
|
from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
list_display = ('__unicode__', 'end_time')
list_editable = ('end_time',)
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
|
from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
Add end_time as editable on the slot list view to make tweaking times easierfrom django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
list_display = ('__unicode__', 'end_time')
list_editable = ('end_time',)
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
|
<commit_before>from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
<commit_msg>Add end_time as editable on the slot list view to make tweaking times easier<commit_after>from django.contrib import admin
from django import forms
from wafer.schedule.models import Venue, Slot, ScheduleItem
from wafer.talks.models import Talk, ACCEPTED
class ScheduleItemAdminForm(forms.ModelForm):
class Meta:
model = ScheduleItem
def __init__(self, *args, **kwargs):
super(ScheduleItemAdminForm, self).__init__(*args, **kwargs)
self.fields['talk'].queryset = Talk.objects.filter(status=ACCEPTED)
class ScheduleItemAdmin(admin.ModelAdmin):
form = ScheduleItemAdminForm
change_list_template = 'admin/scheduleitem_list.html'
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
return super(ScheduleItemAdmin, self).changelist_view(request,
extra_context)
class SlotAdminForm(forms.ModelForm):
class Meta:
model = Slot
class Media:
js = ('js/scheduledatetime.js',)
class SlotAdmin(admin.ModelAdmin):
form = SlotAdminForm
list_display = ('__unicode__', 'end_time')
list_editable = ('end_time',)
admin.site.register(Slot, SlotAdmin)
admin.site.register(Venue)
admin.site.register(ScheduleItem, ScheduleItemAdmin)
|
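The wafer commit above turns on inline editing of end_time via Django's list_editable. A minimal, hypothetical sketch of that admin pattern follows; it assumes a configured Django project and a Slot model importable from myapp.models, neither of which comes from the record itself.

# Hypothetical sketch of ModelAdmin.list_editable; assumes a configured
# Django project and that myapp.models.Slot exists (invented import path).
from django.contrib import admin
from myapp.models import Slot

@admin.register(Slot)
class SlotAdmin(admin.ModelAdmin):
    # Columns on the changelist page; the first column stays the link to the
    # change form, so the editable column must come after it.
    list_display = ('__str__', 'end_time')
    # Renders end_time as a form field directly in the list, so slot times
    # can be tweaked in bulk without opening each object.
    list_editable = ('end_time',)

Django requires every list_editable field to also appear in list_display and not be the first (link) column, which is why the slot's string representation is listed first.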
8d3160f15b3c328ba86d6348a278c6825fcfa6c2
|
wcontrol/src/results.py
|
wcontrol/src/results.py
|
from wcontrol.conf.config import BMI, BFP
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
|
from wcontrol.conf.config import BMI, BFP, MUSCLE
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
self.muscle = self.get_muscle(control.muscle, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
def get_muscle(self, muscle, gender):
for limit_w, limit_m, msg in MUSCLE:
if gender == 'Feminine' and muscle <= limit_w:
return msg
if gender == 'Masculine' and muscle <= limit_m:
return msg
|
Add function to get the skeletal muscle result
|
Add function to get the skeletal muscle result
|
Python
|
mit
|
pahumadad/weight-control,pahumadad/weight-control,pahumadad/weight-control,pahumadad/weight-control
|
from wcontrol.conf.config import BMI, BFP
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
Add function to get the skeletal muscle result
|
from wcontrol.conf.config import BMI, BFP, MUSCLE
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
self.muscle = self.get_muscle(control.muscle, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
def get_muscle(self, muscle, gender):
for limit_w, limit_m, msg in MUSCLE:
if gender == 'Feminine' and muscle <= limit_w:
return msg
if gender == 'Masculine' and muscle <= limit_m:
return msg
|
<commit_before>from wcontrol.conf.config import BMI, BFP
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
<commit_msg>Add function to get the skeletal muscle result<commit_after>
|
from wcontrol.conf.config import BMI, BFP, MUSCLE
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
self.muscle = self.get_muscle(control.muscle, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
def get_muscle(self, muscle, gender):
for limit_w, limit_m, msg in MUSCLE:
if gender == 'Feminine' and muscle <= limit_w:
return msg
if gender == 'Masculine' and muscle <= limit_m:
return msg
|
from wcontrol.conf.config import BMI, BFP
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
Add function to get the skeletal muscle resultfrom wcontrol.conf.config import BMI, BFP, MUSCLE
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
self.muscle = self.get_muscle(control.muscle, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
def get_muscle(self, muscle, gender):
for limit_w, limit_m, msg in MUSCLE:
if gender == 'Feminine' and muscle <= limit_w:
return msg
if gender == 'Masculine' and muscle <= limit_m:
return msg
|
<commit_before>from wcontrol.conf.config import BMI, BFP
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
<commit_msg>Add function to get the skeletal muscle result<commit_after>from wcontrol.conf.config import BMI, BFP, MUSCLE
class results(object):
def __init__(self, control, gender):
self.bmi = self.get_bmi(control.bmi)
self.fat = self.get_fat(control.fat, gender)
self.muscle = self.get_muscle(control.muscle, gender)
def get_bmi(self, bmi):
for limit, msg in BMI:
if bmi <= limit:
return msg
def get_fat(self, fat, gender):
for limit_w, limit_m, msg in BFP:
if gender == 'Feminine' and fat <= limit_w:
return msg
if gender == 'Masculine' and fat <= limit_m:
return msg
def get_muscle(self, muscle, gender):
for limit_w, limit_m, msg in MUSCLE:
if gender == 'Feminine' and muscle <= limit_w:
return msg
if gender == 'Masculine' and muscle <= limit_m:
return msg
|
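The results class above resolves BMI, body-fat and muscle readings against ordered (limit, message) tables from the config module. A self-contained sketch of that threshold-lookup idea, with invented cut-offs and labels:

# Standalone sketch of the threshold-table lookup used above; the limits and
# labels here are invented for illustration only.
BMI_TABLE = [
    (18.5, "underweight"),
    (25.0, "normal"),
    (30.0, "overweight"),
    (float("inf"), "obese"),
]

def classify(value, table):
    # Tables are ordered by ascending limit; the first limit the value does
    # not exceed decides the label.
    for limit, label in table:
        if value <= limit:
            return label

if __name__ == "__main__":
    print(classify(22.3, BMI_TABLE))  # -> normal

Keeping a catch-all float("inf") row at the end guarantees the loop always returns a label, which the original relies on its config tables to provide.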
00009fe9627b947e3bb0674a3a70c61f4be59fb6
|
skimage/viewer/qt/__init__.py
|
skimage/viewer/qt/__init__.py
|
import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PySide
qt_api = 'pyside'
except ImportError:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
|
import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
try:
import PySide
qt_api = 'pyside'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
|
Use same order as matplotlib for PySide/PyQT
|
Use same order as matplotlib for PySide/PyQT
|
Python
|
bsd-3-clause
|
pratapvardhan/scikit-image,oew1v07/scikit-image,rjeli/scikit-image,jwiggins/scikit-image,warmspringwinds/scikit-image,youprofit/scikit-image,newville/scikit-image,warmspringwinds/scikit-image,keflavich/scikit-image,michaelpacer/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,juliusbierk/scikit-image,paalge/scikit-image,Midafi/scikit-image,emon10005/scikit-image,paalge/scikit-image,blink1073/scikit-image,bennlich/scikit-image,SamHames/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,youprofit/scikit-image,chriscrosscutler/scikit-image,Britefury/scikit-image,bsipocz/scikit-image,Hiyorimi/scikit-image,Hiyorimi/scikit-image,dpshelio/scikit-image,jwiggins/scikit-image,SamHames/scikit-image,emon10005/scikit-image,juliusbierk/scikit-image,ofgulban/scikit-image,oew1v07/scikit-image,michaelpacer/scikit-image,ClinicalGraphics/scikit-image,newville/scikit-image,ajaybhat/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,bsipocz/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,Britefury/scikit-image,michaelaye/scikit-image,robintw/scikit-image,rjeli/scikit-image,SamHames/scikit-image,chriscrosscutler/scikit-image,WarrenWeckesser/scikits-image,GaZ3ll3/scikit-image,vighneshbirodkar/scikit-image,keflavich/scikit-image,bennlich/scikit-image,ofgulban/scikit-image,vighneshbirodkar/scikit-image,Midafi/scikit-image,blink1073/scikit-image,GaZ3ll3/scikit-image,robintw/scikit-image,paalge/scikit-image,rjeli/scikit-image
|
import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PySide
qt_api = 'pyside'
except ImportError:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
Use same order as matplotlib for PySide/PyQT
|
import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
try:
import PySide
qt_api = 'pyside'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
|
<commit_before>import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PySide
qt_api = 'pyside'
except ImportError:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
<commit_msg>Use same order as matplotlib for PySize/PyQT<commit_after>
|
import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
try:
import PySide
qt_api = 'pyside'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
|
import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PySide
qt_api = 'pyside'
except ImportError:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
Use same order as matplotlib for PySide/PyQTimport os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
try:
import PySide
qt_api = 'pyside'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
|
<commit_before>import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PySide
qt_api = 'pyside'
except ImportError:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
<commit_msg>Use same order as matplotlib for PySize/PyQT<commit_after>import os
import warnings
qt_api = os.environ.get('QT_API')
if qt_api is None:
try:
import PyQt4
qt_api = 'pyqt'
except ImportError:
try:
import PySide
qt_api = 'pyside'
except ImportError:
qt_api = None
# Note that we don't want to raise an error because that would
# cause the TravisCI build to fail.
warnings.warn("Could not import PyQt4: ImageViewer not available!")
if qt_api is not None:
os.environ['QT_API'] = qt_api
|
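The scikit-image change above only swaps which Qt binding is tried first; the underlying pattern is "probe imports in preference order, then record the winner in QT_API". A generic, runnable sketch of that pattern (the binding names are the same two the record uses; which one wins depends entirely on what is installed locally):

# Generic sketch of trying Qt bindings in preference order and exporting the
# result through the QT_API environment variable.
import importlib
import os
import warnings

PREFERRED = [("PyQt4", "pyqt"), ("PySide", "pyside")]

qt_api = os.environ.get("QT_API")
if qt_api is None:
    for module_name, api_name in PREFERRED:
        try:
            importlib.import_module(module_name)
            qt_api = api_name
            break
        except ImportError:
            continue
    else:
        # Neither binding importable; warn instead of raising so headless
        # environments (e.g. CI) keep working.
        warnings.warn("No Qt binding found; image viewer unavailable.")

if qt_api is not None:
    os.environ["QT_API"] = qt_api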
11dbea120cf035f0af4fec5285187cfd4ea03182
|
project/urls.py
|
project/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
if not settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/fonts/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
|
from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
|
Remove font serving from app
|
Remove font serving from app
|
Python
|
mit
|
AxisPhilly/lobbying.ph-django,AxisPhilly/lobbying.ph-django,AxisPhilly/lobbying.ph-django
|
from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
if not settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/fonts/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)Remove font serving from app
|
from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
if not settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/fonts/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)<commit_msg>Remove font serving from app<commit_after>
|
from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
if not settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/fonts/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)Remove font serving from appfrom django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
if not settings.DEBUG:
urlpatterns += patterns('',
url(r'^static/fonts/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)<commit_msg>Remove font serving from app<commit_after>from django.conf.urls import patterns, include, url
from django.contrib.flatpages import urls
from search.views import CustomSearchView
from haystack.views import search_view_factory
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('lobbyingph.urls')),
url(r'^api/', include('api.urls')),
url(r'^search/', search_view_factory(
view_class=CustomSearchView,
), name='haystack_search'),
url(r'^about/', include(urls.urlpatterns)),
url(r'^admin/', include(admin.site.urls)),
)
|
510c80cb3985750a732f96f91e4730b95491f4ca
|
client/gettransactions.py
|
client/gettransactions.py
|
#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = banks[account["bankname"].downloadaccount(account)
for key in data:
if data[key]:
api.callapi("new"+key, data[key])
api.callapi("logout")
|
#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = json.dumps(banks[account["bankname"]].downloadaccount(account),default=str)
api.callapi("newtransactions", {"data": data})
api.callapi("logout")
|
Send all scraped data in one big json dump.
|
Send all scraped data in one big json dump.
|
Python
|
agpl-3.0
|
vincebusam/pyWebCash,vincebusam/pyWebCash,vincebusam/pyWebCash
|
#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = banks[account["bankname"].downloadaccount(account)
for key in data:
if data[key]:
api.callapi("new"+key, data[key])
api.callapi("logout")
Send all scraped data in one big json dump.
|
#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = json.dumps(banks[account["bankname"]].downloadaccount(account),default=str)
api.callapi("newtransactions", {"data": data})
api.callapi("logout")
|
<commit_before>#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = banks[account["bankname"].downloadaccount(account)
for key in data:
if data[key]:
api.callapi("new"+key, data[key])
api.callapi("logout")
<commit_msg>Send all scraped data in one big json dump.<commit_after>
|
#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = json.dumps(banks[account["bankname"]].downloadaccount(account),default=str)
api.callapi("newtransactions", {"data": data})
api.callapi("logout")
|
#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = banks[account["bankname"].downloadaccount(account)
for key in data:
if data[key]:
api.callapi("new"+key, data[key])
api.callapi("logout")
Send all scraped data in one big json dump.#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = json.dumps(banks[account["bankname"]].downloadaccount(account),default=str)
api.callapi("newtransactions", {"data": data})
api.callapi("logout")
|
<commit_before>#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = banks[account["bankname"].downloadaccount(account)
for key in data:
if data[key]:
api.callapi("new"+key, data[key])
api.callapi("logout")
<commit_msg>Send all scraped data in one big json dump.<commit_after>#!/usr/bin/python
import sys
import api
import getpass
# Banks
banks = {}
import bankofamerica
banks["bankofamerica"] = bankofamerica
print "Login"
print "Username: ",
username = sys.stdin.readline().strip()
password = getpass.getpass()
if not api.callapi("login",{"username": username, "password": password}):
print "Login failed"
sys.exit(1)
todo = api.callapi("accountstodo")
print todo
for account in todo:
if account["bankname"] not in banks:
print "No scraper for %s!" % (account["bankname"])
continue
print "Scraping %s..." % (account["bankname"])
data = json.dumps(banks[account["bankname"]].downloadaccount(account),default=str)
api.callapi("newtransactions", {"data": data})
api.callapi("logout")
|
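The client change above stops posting each record type separately and instead serialises everything with json.dumps(..., default=str) before a single API call. A self-contained sketch of why default=str is the load-bearing part when scraped data contains dates or decimals:

# Sketch of json.dumps(..., default=str); dates and Decimals are not JSON
# serialisable on their own, so default=str makes the encoder fall back to
# str() for anything it cannot handle.
import datetime
import decimal
import json

transactions = [
    {"date": datetime.date(2024, 1, 31),
     "amount": decimal.Decimal("12.50"),
     "memo": "coffee"},
]

payload = json.dumps(transactions, default=str)
print(payload)  # [{"date": "2024-01-31", "amount": "12.50", "memo": "coffee"}]

Note that the snippet in the record itself still needs an import json of its own for the new line to run; the sketch above includes it explicitly.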
bfe5c6a16bf8515ae6ba49f4633f1a301e445092
|
redcliff/cli.py
|
redcliff/cli.py
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge, error
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
required = ['url', 'key']
missing = lambda x: bool(merged_conf.get(x))
if not all(map(missing, required)):
error('fatal: base_url and api_key are required')
return 1
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
Fix exception when required options are missing
|
Fix exception when required options are missing
Options can be on command line and in config file, so we check in merged
dictionary after getting from both sources.
|
Python
|
mit
|
dmedvinsky/redcliff
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
Fix exception when required options are missing
Options can be on command line and in config file, so we check in merged
dictionary after getting from both sources.
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge, error
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
required = ['url', 'key']
missing = lambda x: bool(merged_conf.get(x))
if not all(map(missing, required)):
error('fatal: base_url and api_key are required')
return 1
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
<commit_before>from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
<commit_msg>Fix exception when required options are missing
Options can be on command line and in config file, so we check in merged
dictionary after getting from both sources.<commit_after>
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge, error
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
required = ['url', 'key']
missing = lambda x: bool(merged_conf.get(x))
if not all(map(missing, required)):
error('fatal: base_url and api_key are required')
return 1
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
Fix exception when required options are missing
Options can be on command line and in config file, so we check in merged
dictionary after getting from both sources.from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge, error
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
required = ['url', 'key']
missing = lambda x: bool(merged_conf.get(x))
if not all(map(missing, required)):
error('fatal: base_url and api_key are required')
return 1
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
<commit_before>from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
<commit_msg>Fix exception when required options are missing
Options can be on command line and in config file, so we check in merged
dictionary after getting from both sources.<commit_after>from sys import exit
import argparse
from .commands import dispatch, choices
from .config import get_config
from .utils import merge, error
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--base-url',
dest='url',
metavar='https://redmine.example.com',
help='Base URL of your Redmine installation.')
parser.add_argument('-S', '--no-ssl-verify', dest='ssl_verify',
action='store_const', const=False)
parser.add_argument('-k', '--api-key',
dest='key',
help='Your Redmine API key.')
parser.add_argument('-C', '--config-file',
help='Override default config path.')
parser.add_argument('cmd',
choices=choices,
help='Command to execute.')
parser.add_argument('args',
nargs=argparse.REMAINDER,
help='Arguments to command. Use --help to get '
'command-specific help.')
args = vars(parser.parse_args())
conf = get_config(args.pop('config_file'))
cmd = args.pop('cmd')
cmd_args = args.pop('args')
merged_conf = merge(conf, args)
required = ['url', 'key']
missing = lambda x: bool(merged_conf.get(x))
if not all(map(missing, required)):
error('fatal: base_url and api_key are required')
return 1
return dispatch(cmd, cmd_args, merged_conf)
if __name__ == '__main__':
exit(main())
|
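The redcliff fix above merges command-line arguments over the config file and only then checks that base_url and api_key are present. A standalone sketch of that merge-then-validate approach using plain dicts instead of the project's own merge/error helpers (which are assumed, not shown, in the record):

# Standalone sketch of merge-then-validate: CLI values win when supplied,
# and required keys are checked on the merged result, not on either source.
def merge(config, overrides):
    merged = dict(config)
    merged.update({k: v for k, v in overrides.items() if v is not None})
    return merged

def check_required(conf, required=("url", "key")):
    missing = [name for name in required if not conf.get(name)]
    if missing:
        raise SystemExit("fatal: missing required option(s): " + ", ".join(missing))
    return conf

if __name__ == "__main__":
    file_conf = {"url": "https://redmine.example.com"}
    cli_args = {"url": None, "key": "abc123"}   # user passed only -k
    print(check_required(merge(file_conf, cli_args)))

Validating after the merge is what fixes the reported exception: a key may legitimately be absent from either source as long as the other one supplies it.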
d7280801306d9c7495a08288c6e1574f6dec303d
|
astroquery/astrometry_net/__init__.py
|
astroquery/astrometry_net/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(120,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
Increase timeout so that remote tests pass
|
Increase timeout so that remote tests pass
|
Python
|
bsd-3-clause
|
imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
Increase timeout so that remote tests pass
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(120,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
<commit_msg>Increase timeout so that remote tests pass<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(120,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
Increase timeout so that remote tests pass# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(120,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
<commit_msg>Increase timeout so that remote tests pass<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
astrometry.net
--------------
:author: Matt Craig (<mattwcraig@gmail.com>)
"""
# Make the URL of the server, timeout and other items configurable
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(120,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
|
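The astroquery change above just raises a ConfigItem default from 60 to 120 seconds. A rough sketch of how such a value is read and temporarily overridden at runtime follows; it assumes astropy is installed and uses set_temp, astropy's context manager for scoped overrides:

# Rough sketch of astropy's ConfigNamespace/ConfigItem usage; requires
# astropy, otherwise the import fails.
from astropy import config as _config

class Conf(_config.ConfigNamespace):
    timeout = _config.ConfigItem(120, 'Default timeout for connecting to server')

conf = Conf()
print(conf.timeout)  # 120, unless the user's astropy config file overrides it

with conf.set_temp('timeout', 300):   # scoped override for one slow request
    print(conf.timeout)               # 300
print(conf.timeout)                   # back to 120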
53fd251e22cf71c1f1f9a705d91ef91a9130129f
|
backend/django/apps/accounts/tests.py
|
backend/django/apps/accounts/tests.py
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
from .models import BaseAccount
class UserFactory(factory.Factory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
self.client.login(email=self.user.email, password=self.user.password)
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-list'),)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
|
Create a test for user creation through `FactoryBoy`
|
Create a test for user creation through `FactoryBoy`
Add a test for user creation
|
Python
|
mit
|
slavpetroff/sweetshop,slavpetroff/sweetshop
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
from .models import BaseAccount
class UserFactory(factory.Factory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
self.client.login(email=self.user.email, password=self.user.password)
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-list'),)
self.assertEqual(response.status_code, status.HTTP_200_OK)
Create a test for user creation through `FactoryBoy`
Add a test for user creation
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
|
<commit_before>from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
from .models import BaseAccount
class UserFactory(factory.Factory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
self.client.login(email=self.user.email, password=self.user.password)
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-list'),)
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Create a test for user creation through `FactoryBoy`
Add a test for user creation<commit_after>
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
from .models import BaseAccount
class UserFactory(factory.Factory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
self.client.login(email=self.user.email, password=self.user.password)
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-list'),)
self.assertEqual(response.status_code, status.HTTP_200_OK)
Create a test for user creation through `FactoryBoy`
Add a test for user creationfrom django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
|
<commit_before>from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
from .models import BaseAccount
class UserFactory(factory.Factory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
self.client.login(email=self.user.email, password=self.user.password)
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-list'),)
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Create a test for user creation through `FactoryBoy`
Add a test for user creation<commit_after>from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
|
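The sweetshop test above builds its fixture user through a factory_boy DjangoModelFactory. A plain-Python sketch of the same factory idea that does not need a Django project; it requires the factory_boy package, and the Account class here is invented for illustration:

# Plain-class factory_boy sketch; Account is an invented stand-in for the
# Django model, so factory.Factory (not DjangoModelFactory) is used.
import factory

class Account:
    def __init__(self, first_name, last_name, email, password):
        self.first_name = first_name
        self.last_name = last_name
        self.email = email
        self.password = password

class AccountFactory(factory.Factory):
    class Meta:
        model = Account

    first_name = "John"
    last_name = "Doe"
    # LazyAttribute computes the email from the other declared fields, so
    # overriding first_name below changes the email too.
    email = factory.LazyAttribute(
        lambda o: "{}.{}@example.com".format(o.first_name, o.last_name).lower())
    password = "passjohn1"

user = AccountFactory(first_name="Jane")
print(user.email)  # jane.doe@example.com

Unlike the hard-coded email in the record's original UserFactory, the lazy attribute keeps the address consistent with whatever names a test overrides.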
8496ba409b9a340858e4473157aab87593868db7
|
pytask/views.py
|
pytask/views.py
|
from django.shortcuts import render_to_response
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.rights != "CT" else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
from django.shortcuts import render_to_response
from pytask.profile import models as profile_models
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.role != profile_models.ROLES_CHOICES[3][0] else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
Use the right name for the profile role's values.
|
Use the right name for the profile role's values.
|
Python
|
agpl-3.0
|
madhusudancs/pytask,madhusudancs/pytask,madhusudancs/pytask
|
from django.shortcuts import render_to_response
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.rights != "CT" else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
Use the right name for the profile role's values.
|
from django.shortcuts import render_to_response
from pytask.profile import models as profile_models
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.role != profile_models.ROLES_CHOICES[3][0] else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
<commit_before>from django.shortcuts import render_to_response
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.rights != "CT" else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
<commit_msg>Use the right name for the profile role's values.<commit_after>
|
from django.shortcuts import render_to_response
from pytask.profile import models as profile_models
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.role != profile_models.ROLES_CHOICES[3][0] else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
from django.shortcuts import render_to_response
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.rights != "CT" else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
Use the right name for the profile role's values.
from django.shortcuts import render_to_response
from pytask.profile import models as profile_models
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.role != profile_models.ROLES_CHOICES[3][0] else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
<commit_before>from django.shortcuts import render_to_response
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.rights != "CT" else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
<commit_msg>Use the right name for the profile role's values.<commit_after>from django.shortcuts import render_to_response
from pytask.profile import models as profile_models
def show_msg(user, message, redirect_url=None, url_desc=None):
""" simply redirect to homepage """
return render_to_response('show_msg.html',{'user': user,
'message': message,
'redirect_url': redirect_url,
'url_desc': url_desc})
def home_page(request):
""" get the user and display info about the project if not logged in.
if logged in, display info of their tasks.
"""
user = request.user
if not user.is_authenticated():
return render_to_response("index.html")
profile = user.get_profile()
claimed_tasks = user.claimed_tasks.all()
selected_tasks = user.selected_tasks.all()
reviewing_tasks = user.reviewing_tasks.all()
unpublished_tasks = user.created_tasks.filter(status="UP").all()
can_create_task = True if profile.role != profile_models.ROLES_CHOICES[3][0] else False
context = {"user": user,
"profile": profile,
"claimed_tasks": claimed_tasks,
"selected_tasks": selected_tasks,
"reviewing_tasks": reviewing_tasks,
"unpublished_tasks": unpublished_tasks,
"can_create_task": can_create_task
}
return render_to_response("index.html", context)
def under_construction(request):
return render_to_response("under_construction.html")
|
46835075ad7dd6dd4b0478877f57b07e0e282add
|
tensorflow_datasets/image/nyu_depth_v2_test.py
|
tensorflow_datasets/image/nyu_depth_v2_test.py
|
"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "test": 1}
if __name__ == "__main__":
testing.test_main()
|
"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "validation": 1}
if __name__ == "__main__":
testing.test_main()
|
Fix validation split for NYU Depth V2 test
|
Fix validation split for NYU Depth V2 test
|
Python
|
apache-2.0
|
tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets
|
"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "test": 1}
if __name__ == "__main__":
testing.test_main()
Fix validation split for NYU Depth V2 test
|
"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "validation": 1}
if __name__ == "__main__":
testing.test_main()
|
<commit_before>"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "test": 1}
if __name__ == "__main__":
testing.test_main()
<commit_msg>Fix validation split for NYU Depth V2 test<commit_after>
|
"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "validation": 1}
if __name__ == "__main__":
testing.test_main()
|
"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "test": 1}
if __name__ == "__main__":
testing.test_main()
Fix validation split for NYU Depth V2 test
"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "validation": 1}
if __name__ == "__main__":
testing.test_main()
|
<commit_before>"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "test": 1}
if __name__ == "__main__":
testing.test_main()
<commit_msg>Fix validation split for NYU Depth V2 test<commit_after>"""Tests for NYU Depth V2 Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_datasets import testing
from tensorflow_datasets.image import nyu_depth_v2
class NyuDepthV2Test(testing.DatasetBuilderTestCase):
DATASET_CLASS = nyu_depth_v2.NyuDepthV2
SPLITS = {"train": 2, "validation": 1}
if __name__ == "__main__":
testing.test_main()
|
3cb25e903ad0fd342509d32dca2d3c507f001b5a
|
devilry/devilry_autoset_empty_email_by_username/models.py
|
devilry/devilry_autoset_empty_email_by_username/models.py
|
from django.db.models.signals import post_save
from devilry.devilry_account.models import User
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
post_save.connect(set_email_by_username,
sender=User)
|
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
# post_save.connect(set_email_by_username,
# sender=User)
|
Comment out the post_save connect line.
|
devilry_autoset_empty_email_by_username: Comment out the post_save connect line.
|
Python
|
bsd-3-clause
|
devilry/devilry-django,devilry/devilry-django,devilry/devilry-django,devilry/devilry-django
|
from django.db.models.signals import post_save
from devilry.devilry_account.models import User
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
post_save.connect(set_email_by_username,
sender=User)
devilry_autoset_empty_email_by_username: Comment out the post_save connect line.
|
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
# post_save.connect(set_email_by_username,
# sender=User)
|
<commit_before>from django.db.models.signals import post_save
from devilry.devilry_account.models import User
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
post_save.connect(set_email_by_username,
sender=User)
<commit_msg>devilry_autoset_empty_email_by_username: Comment out the post_save connect line.<commit_after>
|
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
# post_save.connect(set_email_by_username,
# sender=User)
|
from django.db.models.signals import post_save
from devilry.devilry_account.models import User
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
post_save.connect(set_email_by_username,
sender=User)
devilry_autoset_empty_email_by_username: Comment out the post_save connect line.
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
# post_save.connect(set_email_by_username,
# sender=User)
|
<commit_before>from django.db.models.signals import post_save
from devilry.devilry_account.models import User
from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
post_save.connect(set_email_by_username,
sender=User)
<commit_msg>devilry_autoset_empty_email_by_username: Comment out the post_save connect line.<commit_after>from django.conf import settings
def set_email_by_username(sender, **kwargs):
"""
Signal handler which is invoked when a User is saved.
"""
user = kwargs['instance']
if not user.email:
user.email = '{0}@{1}'.format(user.username, settings.DEVILRY_DEFAULT_EMAIL_SUFFIX)
# post_save.connect(set_email_by_username,
# sender=User)
|
04fbd56e647de937ceae426acb6762f1cbbcf616
|
cryptography/__about__.py
|
cryptography/__about__.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Chris Heime, and Indivdual Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Christian Heimes, and Indivdual "
"Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
Fix spelling of my name
|
Fix spelling of my name
|
Python
|
bsd-3-clause
|
bwhmather/cryptography,dstufft/cryptography,glyph/cryptography,kimvais/cryptography,dstufft/cryptography,bwhmather/cryptography,Lukasa/cryptography,sholsapp/cryptography,bwhmather/cryptography,skeuomorf/cryptography,kimvais/cryptography,Ayrx/cryptography,sholsapp/cryptography,skeuomorf/cryptography,glyph/cryptography,Hasimir/cryptography,skeuomorf/cryptography,Ayrx/cryptography,dstufft/cryptography,dstufft/cryptography,Lukasa/cryptography,Lukasa/cryptography,bwhmather/cryptography,Ayrx/cryptography,Hasimir/cryptography,kimvais/cryptography,dstufft/cryptography,sholsapp/cryptography,Hasimir/cryptography,sholsapp/cryptography,Ayrx/cryptography,Hasimir/cryptography,kimvais/cryptography,skeuomorf/cryptography
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Chris Heime, and Indivdual Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
Fix spelling of my name
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Christian Heimes, and Indivdual "
"Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Chris Heime, and Indivdual Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
<commit_msg>Fix spelling of my name<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Christian Heimes, and Indivdual "
"Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Chris Heime, and Indivdual Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
Fix spelling of my name
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Christian Heimes, and Indivdual "
"Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Chris Heime, and Indivdual Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
<commit_msg>Fix spelling of my name<commit_after># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
__title__ = "cryptography"
__summary__ = ("cryptography is a package designed to expose cryptographic "
"primitives and recipes to Python developers.")
__uri__ = "https://github.com/alex/cryptography"
__version__ = "0.1.dev1"
__author__ = ("Alex Gaynor, Donald Stufft, Laurens van Houvten, "
"Jean-Paul Calderone, Christian Heimes, and Indivdual "
"Contributors")
__email__ = "cryptography-dev@python.org"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
|
092c58bbd0a8105e80e35c9c83833a62b510104f
|
similarities.py
|
similarities.py
|
#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
|
#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
if similarity < 0:
similarity = args['sim']
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
|
Add a negative similarity crutch
|
Add a negative similarity crutch
|
Python
|
mit
|
dustalov/watset,dustalov/watset
|
#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
Add a negative similarity crutch
|
#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
if similarity < 0:
similarity = args['sim']
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
|
<commit_before>#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
<commit_msg>Add a negative similarity crutch<commit_after>
|
#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
if similarity < 0:
similarity = args['sim']
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
|
#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
Add a negative similarity crutch
#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
if similarity < 0:
similarity = args['sim']
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
|
<commit_before>#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
<commit_msg>Add a negative similarity crutch<commit_after>#!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import argparse
import sys
from gensim.models.word2vec import Word2Vec
import csv
parser = argparse.ArgumentParser()
parser.add_argument('--sim', nargs='?', type=float, default=.3)
parser.add_argument('w2v')
args = vars(parser.parse_args())
w2v = Word2Vec.load_word2vec_format(args['w2v'], binary=True, unicode_errors='ignore')
w2v.init_sims(replace=True)
print('Using %d word2vec dimensions from "%s".' % (w2v.layer1_size, sys.argv[1]), file=sys.stderr)
reader = csv.reader(sys.stdin, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word1, word2 = row[0], row[1]
try:
similarity = w2v.similarity(word1, word2)
if similarity < 0:
similarity = args['sim']
except KeyError:
similarity = args['sim']
print('%s\t%s\t%f' % (word1, word2, similarity))
|
78154a63e86774fb8952f42883f7788e94d0c8d2
|
lib/spack/spack/operating_systems/linux_distro.py
|
lib/spack/spack/operating_systems/linux_distro.py
|
import re
from external.distro import linux_distribution
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
|
import re
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
try:
# This will throw an error if imported on a non-Linux platform.
from external.distro import linux_distribution
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
except ImportError as e:
distname, version = 'unknown', ''
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
|
Fix bug in distribution detection on unsupported platforms.
|
Fix bug in distribution detection on unsupported platforms.
|
Python
|
lgpl-2.1
|
EmreAtes/spack,TheTimmy/spack,mfherbst/spack,tmerrick1/spack,iulian787/spack,skosukhin/spack,matthiasdiener/spack,mfherbst/spack,lgarren/spack,EmreAtes/spack,iulian787/spack,EmreAtes/spack,matthiasdiener/spack,mfherbst/spack,LLNL/spack,TheTimmy/spack,skosukhin/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,krafczyk/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,tmerrick1/spack,krafczyk/spack,TheTimmy/spack,iulian787/spack,krafczyk/spack,TheTimmy/spack,matthiasdiener/spack,lgarren/spack,iulian787/spack,tmerrick1/spack,EmreAtes/spack,mfherbst/spack,matthiasdiener/spack,lgarren/spack,tmerrick1/spack,iulian787/spack,lgarren/spack,lgarren/spack,krafczyk/spack,skosukhin/spack,LLNL/spack,EmreAtes/spack,skosukhin/spack,TheTimmy/spack,tmerrick1/spack,LLNL/spack
|
import re
from external.distro import linux_distribution
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
Fix bug in distribution detection on unsupported platforms.
|
import re
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
try:
# This will throw an error if imported on a non-Linux platform.
from external.distro import linux_distribution
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
except ImportError as e:
distname, version = 'unknown', ''
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
|
<commit_before>import re
from external.distro import linux_distribution
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
<commit_msg>Fix bug in distribution detection on unsupported platforms.<commit_after>
|
import re
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
try:
# This will throw an error if imported on a non-Linux platform.
from external.distro import linux_distribution
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
except ImportError as e:
distname, version = 'unknown', ''
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
|
import re
from external.distro import linux_distribution
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
Fix bug in distribution detection on unsupported platforms.
import re
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
try:
# This will throw an error if imported on a non-Linux platform.
from external.distro import linux_distribution
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
except ImportError as e:
distname, version = 'unknown', ''
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
|
<commit_before>import re
from external.distro import linux_distribution
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
<commit_msg>Fix bug in distribution detection on unsupported platforms.<commit_after>import re
from spack.architecture import OperatingSystem
class LinuxDistro(OperatingSystem):
""" This class will represent the autodetected operating system
for a Linux System. Since there are many different flavors of
Linux, this class will attempt to encompass them all through
autodetection using the python module platform and the method
platform.dist()
"""
def __init__(self):
try:
# This will throw an error if imported on a non-Linux platform.
from external.distro import linux_distribution
distname, version, _ = linux_distribution(
full_distribution_name=False)
distname, version = str(distname), str(version)
except ImportError as e:
distname, version = 'unknown', ''
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
version = re.split(r'[^\w-]', version)[0]
super(LinuxDistro, self).__init__(distname, version)
|
aae36c00e6dbea1ed68d2a921021d586d5ff723e
|
openquake/baselib/safeprint.py
|
openquake/baselib/safeprint.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
conv_str = ()
for s in args:
conv_str = s.encode('utf-8').decode(sys.stdout.encoding, 'ignore')
return __builtin__.print(conv_str, **kwargs)
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from sys import stdout
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
ret_str = ()
# when stdout is redirected to a file, python 2 uses ascii for the writer;
# python 3 uses what is configured in the system (i.e. 'utf-8')
str_encoding = stdout.encoding if stdout.encoding is not None else 'ascii'
for s in args:
ret_str = s.encode('utf-8').decode(str_encoding, 'ignore')
return __builtin__.print(ret_str, **kwargs)
|
Fix out redirection in python2
|
Fix out redirection in python2
|
Python
|
agpl-3.0
|
gem/oq-engine,gem/oq-engine,gem/oq-hazardlib,gem/oq-hazardlib,gem/oq-hazardlib,gem/oq-engine,gem/oq-engine,gem/oq-engine
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
conv_str = ()
for s in args:
conv_str = s.encode('utf-8').decode(sys.stdout.encoding, 'ignore')
return __builtin__.print(conv_str, **kwargs)
Fix out redirection in python2
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from sys import stdout
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
ret_str = ()
# when stdout is redirected to a file, python 2 uses ascii for the writer;
# python 3 uses what is configured in the system (i.e. 'utf-8')
str_encoding = stdout.encoding if stdout.encoding is not None else 'ascii'
for s in args:
ret_str = s.encode('utf-8').decode(str_encoding, 'ignore')
return __builtin__.print(ret_str, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
conv_str = ()
for s in args:
conv_str = s.encode('utf-8').decode(sys.stdout.encoding, 'ignore')
return __builtin__.print(conv_str, **kwargs)
<commit_msg>Fix out redirection in python2<commit_after>
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from sys import stdout
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
ret_str = ()
# when stdout is redirected to a file, python 2 uses ascii for the writer;
# python 3 uses what is configured in the system (i.e. 'utf-8')
str_encoding = stdout.encoding if stdout.encoding is not None else 'ascii'
for s in args:
ret_str = s.encode('utf-8').decode(str_encoding, 'ignore')
return __builtin__.print(ret_str, **kwargs)
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
conv_str = ()
for s in args:
conv_str = s.encode('utf-8').decode(sys.stdout.encoding, 'ignore')
return __builtin__.print(conv_str, **kwargs)
Fix out redirection in python2
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from sys import stdout
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
ret_str = ()
# when stdout is redirected to a file, python 2 uses ascii for the writer;
# python 3 uses what is configured in the system (i.e. 'utf-8')
str_encoding = stdout.encoding if stdout.encoding is not None else 'ascii'
for s in args:
ret_str = s.encode('utf-8').decode(str_encoding, 'ignore')
return __builtin__.print(ret_str, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
conv_str = ()
for s in args:
conv_str = s.encode('utf-8').decode(sys.stdout.encoding, 'ignore')
return __builtin__.print(conv_str, **kwargs)
<commit_msg>Fix out redirection in python2<commit_after># -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from sys import stdout
try:
import __builtin__
except ImportError:
# Python 3
import builtins as __builtin__
def print(*args, **kwargs):
ret_str = ()
# when stdout is redirected to a file, python 2 uses ascii for the writer;
# python 3 uses what is configured in the system (i.e. 'utf-8')
str_encoding = stdout.encoding if stdout.encoding is not None else 'ascii'
for s in args:
ret_str = s.encode('utf-8').decode(str_encoding, 'ignore')
return __builtin__.print(ret_str, **kwargs)
|
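The commit above works around sys.stdout.encoding being None in Python 2 when output is redirected to a file or pipe. A minimal standalone sketch of the same fallback idea (not taken from the repository; the sample string is an assumption):
import sys
encoding = sys.stdout.encoding if sys.stdout.encoding is not None else 'ascii'
text = u'café'
# the encode/decode round trip silently drops characters the active codec cannot represent
print(text.encode('utf-8').decode(encoding, 'ignore'))
# on a UTF-8 terminal this prints 'café'; with the ascii fallback it prints 'caf'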
d7f43a15a2e4535728e7ec5d3cb550af3eed1590
|
h2o-py/h2o/tree/__init__.py
|
h2o-py/h2o/tree/__init__.py
|
from .tree import H2OTree
from .tree import H2ONode
__all__ = ["H2OTree", "H2ONode"]
|
from .tree import H2OTree
from .tree import H2ONode
from .tree import H2OSplitNode
from .tree import H2OLeafNode
__all__ = ["H2OTree", "H2ONode", "H2OSplitNode", "H2OLeafNode"]
|
Include H2OSplitNode & H2OLeafNode in __all__
|
Include H2OSplitNode & H2OLeafNode in __all__
|
Python
|
apache-2.0
|
michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,h2oai/h2o-3
|
from .tree import H2OTree
from .tree import H2ONode
__all__ = ["H2OTree", "H2ONode"]
Include H2OSplitNode & H2OLeafNode in __all__
|
from .tree import H2OTree
from .tree import H2ONode
from .tree import H2OSplitNode
from .tree import H2OLeafNode
__all__ = ["H2OTree", "H2ONode", "H2OSplitNode", "H2OLeafNode"]
|
<commit_before>from .tree import H2OTree
from .tree import H2ONode
__all__ = ["H2OTree", "H2ONode"]<commit_msg>Include H2OSplitNode & H2OLeafNode in __all__<commit_after>
|
from .tree import H2OTree
from .tree import H2ONode
from .tree import H2OSplitNode
from .tree import H2OLeafNode
__all__ = ["H2OTree", "H2ONode", "H2OSplitNode", "H2OLeafNode"]
|
from .tree import H2OTree
from .tree import H2ONode
__all__ = ["H2OTree", "H2ONode"]
Include H2OSplitNode & H2OLeafNode in __all__
from .tree import H2OTree
from .tree import H2ONode
from .tree import H2OSplitNode
from .tree import H2OLeafNode
__all__ = ["H2OTree", "H2ONode", "H2OSplitNode", "H2OLeafNode"]
|
<commit_before>from .tree import H2OTree
from .tree import H2ONode
__all__ = ["H2OTree", "H2ONode"]<commit_msg>Include H2OSplitNode & H2OLeafNode in __all__<commit_after>from .tree import H2OTree
from .tree import H2ONode
from .tree import H2OSplitNode
from .tree import H2OLeafNode
__all__ = ["H2OTree", "H2ONode", "H2OSplitNode", "H2OLeafNode"]
|
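For context on the change above: __all__ controls which names a wildcard import re-exports. A self-contained sketch (the in-memory toy module is hypothetical, not part of h2o-py):
import sys
import types
toy = types.ModuleType('toy')
exec("H2OTree = object\nH2OSplitNode = object\n__all__ = ['H2OTree']", toy.__dict__)
sys.modules['toy'] = toy
ns = {}
exec('from toy import *', ns)
# only the name listed in __all__ is exported by the star import
print(sorted(k for k in ns if not k.startswith('__')))  # ['H2OTree']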
b652da4dda3ed5c0a37f2d32a07b9afaf6267e53
|
organization/network/migrations/0118_team_slug.py
|
organization/network/migrations/0118_team_slug.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
def generate_slugs(apps, schema_editor):
teams = Team.objects.all()
for team in teams:
team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
migrations.RunPython(generate_slugs),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
# def generate_slugs(apps, schema_editor):
# teams = Team.objects.all()
# for team in teams:
# team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
# migrations.RunPython(generate_slugs),
]
|
Fix migration when no existing team.user field at fresh start
|
Fix migration when no existing team.user field at fresh start
|
Python
|
agpl-3.0
|
Ircam-Web/mezzanine-organization,Ircam-Web/mezzanine-organization
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
def generate_slugs(apps, schema_editor):
teams = Team.objects.all()
for team in teams:
team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
migrations.RunPython(generate_slugs),
]
Fix migration when no existing team.user field at fresh start
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
# def generate_slugs(apps, schema_editor):
# teams = Team.objects.all()
# for team in teams:
# team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
# migrations.RunPython(generate_slugs),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
def generate_slugs(apps, schema_editor):
teams = Team.objects.all()
for team in teams:
team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
migrations.RunPython(generate_slugs),
]
<commit_msg>Fix migration when no existing team.user field at fresh start<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
# def generate_slugs(apps, schema_editor):
# teams = Team.objects.all()
# for team in teams:
# team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
# migrations.RunPython(generate_slugs),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
def generate_slugs(apps, schema_editor):
teams = Team.objects.all()
for team in teams:
team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
migrations.RunPython(generate_slugs),
]
Fix migration when no existing team.user field at fresh start
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
# def generate_slugs(apps, schema_editor):
# teams = Team.objects.all()
# for team in teams:
# team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
# migrations.RunPython(generate_slugs),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
def generate_slugs(apps, schema_editor):
teams = Team.objects.all()
for team in teams:
team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
migrations.RunPython(generate_slugs),
]
<commit_msg>Fix migration when no existing team.user field at fresh start<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-12-19 11:53
from __future__ import unicode_literals
from django.db import migrations, models
from organization.network.models import Team
# def generate_slugs(apps, schema_editor):
# teams = Team.objects.all()
# for team in teams:
# team.save()
class Migration(migrations.Migration):
dependencies = [
('organization-network', '0117_merge_20181204_1801'),
]
operations = [
migrations.AddField(
model_name='team',
name='slug',
field=models.CharField(blank=True, help_text='Leave blank to have the URL auto-generated from the name.', max_length=2000, null=True, verbose_name='URL'),
),
# migrations.RunPython(generate_slugs),
]
|
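The fix above sidesteps the fresh-install failure by commenting out the data-migration step. A commonly used alternative, shown here only as a hedged sketch (not what the project did), is to resolve the model from the migration state with apps.get_model so the file never imports organization.network.models at load time; note that historical models do not carry the concrete model's custom save() logic, which is likely why the original code imported Team directly.
from django.db import migrations
def generate_slugs(apps, schema_editor):
    # historical model taken from the migration state, not from the app code
    Team = apps.get_model('organization-network', 'Team')
    for team in Team.objects.all():
        # custom save() overrides are unavailable here, so slug generation
        # would have to be performed explicitly in a real migration
        team.save()
class Migration(migrations.Migration):
    dependencies = [('organization-network', '0117_merge_20181204_1801')]
    operations = [migrations.RunPython(generate_slugs, migrations.RunPython.noop)]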
7e0de7c98be293bd3f48f00138c997292696ee6f
|
Lib/importlib/test/regrtest.py
|
Lib/importlib/test/regrtest.py
|
"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import # execution bit, exception name differing, file name differing
between code and module (?)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
|
"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import
execution bit
file name differing between __file__ and co_filename (r68360 on trunk)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
|
Clarify why test_import is failing under importlib.
|
Clarify why test_import is failing under importlib.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import # execution bit, exception name differing, file name differing
between code and module (?)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
Clarify why test_import is failing under importlib.
|
"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import
execution bit
file name differing between __file__ and co_filename (r68360 on trunk)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
|
<commit_before>"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import # execution bit, exception name differing, file name differing
between code and module (?)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
<commit_msg>Clarify why test_import is failing under importlib.<commit_after>
|
"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import
execution bit
file name differing between __file__ and co_filename (r68360 on trunk)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
|
"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import # execution bit, exception name differing, file name differing
between code and module (?)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
Clarify why test_import is failing under importlib.
"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import
execution bit
file name differing between __file__ and co_filename (r68360 on trunk)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
|
<commit_before>"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import # execution bit, exception name differing, file name differing
between code and module (?)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
<commit_msg>Clarify why test_import is failing under importlib.<commit_after>"""Run Python's standard test suite using importlib.__import__.
Tests known to fail because of assumptions that importlib (properly)
invalidates are automatically skipped if the entire test suite is run.
Otherwise all command-line options valid for test.regrtest are also valid for
this script.
XXX FAILING
test_import
execution bit
file name differing between __file__ and co_filename (r68360 on trunk)
"""
import importlib
import sys
from test import regrtest
if __name__ == '__main__':
__builtins__.__import__ = importlib.__import__
exclude = ['--exclude',
'test_frozen', # Does not expect __loader__ attribute
'test_pkg', # Does not expect __loader__ attribute
'test_pydoc', # Does not expect __loader__ attribute
]
# Switching on --exclude implies running all test but the ones listed, so
# only use it when one is not running an explicit test
if len(sys.argv) == 1:
# No programmatic way to specify tests to exclude
sys.argv.extend(exclude)
regrtest.main(quiet=True, verbose2=True)
|
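The script above pushes the whole standard test suite through importlib by rebinding __import__. A tiny standalone illustration of that rebinding mechanism, unrelated to regrtest itself (the tracing wrapper is an invented example, written with the modern builtins module rather than __builtins__):
import builtins
calls = []
original_import = builtins.__import__
def tracing_import(name, globals=None, locals=None, fromlist=(), level=0):
    # every import statement funnels through builtins.__import__
    calls.append(name)
    return original_import(name, globals, locals, fromlist, level)
builtins.__import__ = tracing_import
try:
    import json  # resolved through tracing_import
finally:
    builtins.__import__ = original_import
print(calls)  # ['json']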
e77e5019ac81d6ea41e9253d394977543f26c9be
|
application.py
|
application.py
|
#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
cloud_watch_handler = watchtower.CloudWatchLogHandler()
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
|
#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
log_group = os.getenv('EQ_SR_LOG_GROUP')
cloud_watch_handler = watchtower.CloudWatchLogHandler(log_group=log_group)
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
|
Allow log group to be set from env
|
Allow log group to be set from env
|
Python
|
mit
|
ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner
|
#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
cloud_watch_handler = watchtower.CloudWatchLogHandler()
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
Allow log group to be set from env
|
#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
log_group = os.getenv('EQ_SR_LOG_GROUP')
cloud_watch_handler = watchtower.CloudWatchLogHandler(log_group=log_group)
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
|
<commit_before>#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
cloud_watch_handler = watchtower.CloudWatchLogHandler()
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
<commit_msg>Allow log group to be set from env<commit_after>
|
#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
log_group = os.getenv('EQ_SR_LOG_GROUP')
cloud_watch_handler = watchtower.CloudWatchLogHandler(log_group=log_group)
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
|
#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
cloud_watch_handler = watchtower.CloudWatchLogHandler()
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
Allow log group to be set from env
#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
log_group = os.getenv('EQ_SR_LOG_GROUP')
cloud_watch_handler = watchtower.CloudWatchLogHandler(log_group=log_group)
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
|
<commit_before>#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
cloud_watch_handler = watchtower.CloudWatchLogHandler()
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
<commit_msg>Allow log group to be set from env<commit_after>#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
import watchtower
import logging
application = create_app(
os.getenv('EQ_ENVIRONMENT') or 'development'
)
application.debug = True
manager = Manager(application)
port = int(os.environ.get('PORT', 5000))
manager.add_command("runserver", Server(host='0.0.0.0', port=port))
log_group = os.getenv('EQ_SR_LOG_GROUP')
cloud_watch_handler = watchtower.CloudWatchLogHandler(log_group=log_group)
FORMAT = "[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s"
levels = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG
}
logging.basicConfig(level=levels[os.getenv('EQ_LOG_LEVEL') or 'WARNING'], format=FORMAT)
application.logger.addHandler(cloud_watch_handler)
logging.getLogger().addHandler(cloud_watch_handler)
logging.getLogger(__name__).addHandler(cloud_watch_handler)
logging.getLogger('werkzeug').addHandler(cloud_watch_handler)
if __name__ == '__main__':
manager.run()
|
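Two details in the record above are worth a sketch: the CloudWatch log group now comes from EQ_SR_LOG_GROUP, and the log level is mapped through a hand-written dict. The level lookup can also lean on the logging module itself; this is an illustrative alternative, not the project's code (EQ_LOG_LEVEL is the variable the script already reads, everything else is assumed):
import logging
import os
level_name = (os.getenv('EQ_LOG_LEVEL') or 'WARNING').upper()
# fall back to WARNING when the variable holds an unknown level name
level = getattr(logging, level_name, logging.WARNING)
logging.basicConfig(level=level,
                    format='[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s')
logging.getLogger(__name__).warning('configured at level %s', level_name)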
33ad684b4b8efab81d82e575d97184dc004d0386
|
phy/conftest.py
|
phy/conftest.py
|
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc):
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
|
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc): # pragma: no cover
# Use --repeat option.
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
|
Remove repeat option from coverage
|
Remove repeat option from coverage
|
Python
|
bsd-3-clause
|
kwikteam/phy,kwikteam/phy,kwikteam/phy
|
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc):
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
Remove repeat option from coverage
|
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc): # pragma: no cover
# Use --repeat option.
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
|
<commit_before># -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc):
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
<commit_msg>Remove repeat option from coverage<commit_after>
|
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc): # pragma: no cover
# Use --repeat option.
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
|
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc):
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
Remove repeat option from coverage
# -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc): # pragma: no cover
# Use --repeat option.
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
|
<commit_before># -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc):
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
<commit_msg>Remove repeat option from coverage<commit_after># -*- coding: utf-8 -*-
"""py.test utilities."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import logging
import numpy as np
import warnings
import matplotlib
from phylib import add_default_handler
from phylib.conftest import * # noqa
#------------------------------------------------------------------------------
# Common fixtures
#------------------------------------------------------------------------------
logger = logging.getLogger('phy')
logger.setLevel(10)
add_default_handler(5, logger=logger)
# Fix the random seed in the tests.
np.random.seed(2019)
warnings.filterwarnings('ignore', category=matplotlib.cbook.mplDeprecation)
warnings.filterwarnings("ignore", message="numpy.dtype size changed")
warnings.filterwarnings("ignore", message="numpy.ufunc size changed")
def pytest_addoption(parser):
"""Repeat option."""
parser.addoption('--repeat', action='store', help='Number of times to repeat each test')
def pytest_generate_tests(metafunc): # pragma: no cover
# Use --repeat option.
if metafunc.config.option.repeat is not None:
count = int(metafunc.config.option.repeat)
metafunc.fixturenames.append('tmp_ct')
metafunc.parametrize('tmp_ct', range(count))
|
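The hook in the record above repeats every collected test by parametrizing a dummy fixture. Running `pytest --repeat 3` is then roughly equivalent to giving each test an extra parametrized argument, as in this standalone sketch (the test name is invented):
import pytest
@pytest.mark.parametrize('tmp_ct', range(3))
def test_runs_three_times(tmp_ct):
    assert 0 <= tmp_ct < 3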
56d7d7f9a6d8b0994f379b9f6a8f0af85be86c1a
|
docs/code_examples/websockets_async.py
|
docs/code_examples/websockets_async.py
|
import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=sample_transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
|
import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
|
Fix typo in docs websocket_async code example
|
Fix typo in docs websocket_async code example
|
Python
|
mit
|
graphql-python/gql
|
import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=sample_transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
Fix typo in docs websocket_async code example
|
import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
|
<commit_before>import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=sample_transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
<commit_msg>Fix typo in docs websocket_async code example<commit_after>
|
import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
|
import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=sample_transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
Fix typo in docs websocket_async code exampleimport logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
|
<commit_before>import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=sample_transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
<commit_msg>Fix typo in docs websocket_async code example<commit_after>import logging
logging.basicConfig(level=logging.INFO)
from gql import gql, Client, WebsocketsTransport
import asyncio
async def main():
transport = WebsocketsTransport(url='wss://countries.trevorblades.com/graphql')
# Using `async with` on the client will start a connection on the transport
# and provide a `session` variable to execute queries on this connection
async with Client(
transport=transport,
fetch_schema_from_transport=True,
) as session:
# Execute single query
query = gql('''
query getContinents {
continents {
code
name
}
}
''')
result = await session.execute(query)
print(result)
# Request subscription
subscription = gql('''
subscription {
somethingChanged {
id
}
}
''')
async for result in session.subscribe(subscription):
print(result)
asyncio.run(main())
|
d0b25766a6e36294ae2c8083664fa36be6be292f
|
signage/urls.py
|
signage/urls.py
|
from django.conf.urls import url
from .views import DisplayCreate
from .views import DisplayDelete
from .views import DisplayDetail
from .views import DisplayList
from .views import DisplayUpdate
from .views import SlideCreate
from .views import SlideDelete
from .views import SlideList
from .views import SlideUpdate
app_name = 'signage'
urlpatterns = [
url(r'^(?P<pk>\d+)/$', DisplayDetail.as_view(), name='display'),
url(r'^displays/$', DisplayList.as_view(), name='display_list'),
url(r'^displays/create/$', DisplayCreate.as_view(), name='display_create'),
url(r'^displays/delete/(?P<pk>\d+)/$', DisplayDelete.as_view(), name='display_delete'),
url(r'^displays/update/(?P<pk>\d+)/$', DisplayUpdate.as_view(), name='display_update'),
url(r'^slides/$', SlideList.as_view(), name='slide_list'),
url(r'^slides/create/$', SlideCreate.as_view(), name='slide_create'),
url(r'^slides/delete/(?P<pk>\d+)/$', SlideDelete.as_view(), name='slide_delete'),
url(r'^slides/update/(?P<pk>\d+)/$', SlideUpdate.as_view(), name='slide_update'),
]
|
from django.conf.urls import url
from . import views
app_name = 'signage'
urlpatterns = [
url(r'^display/(?P<pk>\d+)/$', views.DisplayDetail.as_view(), name='display'),
url(r'^display/create/$', views.DisplayCreate.as_view(), name='display_create'),
url(r'^display/(?P<pk>\d+)/delete/$', views.DisplayDelete.as_view(), name='display_delete'),
url(r'^display/(?P<pk>\d+)/update/$', views.DisplayUpdate.as_view(), name='display_update'),
url(r'^displays/$', views.DisplayList.as_view(), name='display_list'),
url(r'^slide/create/$', views.SlideCreate.as_view(), name='slide_create'),
url(r'^slide/(?P<pk>\d+)/delete/$', views.SlideDelete.as_view(), name='slide_delete'),
url(r'^slide/(?P<pk>\d+)/update/$', views.SlideUpdate.as_view(), name='slide_update'),
url(r'^slides/$', views.SlideList.as_view(), name='slide_list'),
]
|
Refactor URL imports and paths
|
Refactor URL imports and paths
|
Python
|
bsd-3-clause
|
jbittel/django-signage,jbittel/django-signage,jbittel/django-signage
|
from django.conf.urls import url
from .views import DisplayCreate
from .views import DisplayDelete
from .views import DisplayDetail
from .views import DisplayList
from .views import DisplayUpdate
from .views import SlideCreate
from .views import SlideDelete
from .views import SlideList
from .views import SlideUpdate
app_name = 'signage'
urlpatterns = [
url(r'^(?P<pk>\d+)/$', DisplayDetail.as_view(), name='display'),
url(r'^displays/$', DisplayList.as_view(), name='display_list'),
url(r'^displays/create/$', DisplayCreate.as_view(), name='display_create'),
url(r'^displays/delete/(?P<pk>\d+)/$', DisplayDelete.as_view(), name='display_delete'),
url(r'^displays/update/(?P<pk>\d+)/$', DisplayUpdate.as_view(), name='display_update'),
url(r'^slides/$', SlideList.as_view(), name='slide_list'),
url(r'^slides/create/$', SlideCreate.as_view(), name='slide_create'),
url(r'^slides/delete/(?P<pk>\d+)/$', SlideDelete.as_view(), name='slide_delete'),
url(r'^slides/update/(?P<pk>\d+)/$', SlideUpdate.as_view(), name='slide_update'),
]
Refactor URL imports and paths
|
from django.conf.urls import url
from . import views
app_name = 'signage'
urlpatterns = [
url(r'^display/(?P<pk>\d+)/$', views.DisplayDetail.as_view(), name='display'),
url(r'^display/create/$', views.DisplayCreate.as_view(), name='display_create'),
url(r'^display/(?P<pk>\d+)/delete/$', views.DisplayDelete.as_view(), name='display_delete'),
url(r'^display/(?P<pk>\d+)/update/$', views.DisplayUpdate.as_view(), name='display_update'),
url(r'^displays/$', views.DisplayList.as_view(), name='display_list'),
url(r'^slide/create/$', views.SlideCreate.as_view(), name='slide_create'),
url(r'^slide/(?P<pk>\d+)/delete/$', views.SlideDelete.as_view(), name='slide_delete'),
url(r'^slide/(?P<pk>\d+)/update/$', views.SlideUpdate.as_view(), name='slide_update'),
url(r'^slides/$', views.SlideList.as_view(), name='slide_list'),
]
|
<commit_before>from django.conf.urls import url
from .views import DisplayCreate
from .views import DisplayDelete
from .views import DisplayDetail
from .views import DisplayList
from .views import DisplayUpdate
from .views import SlideCreate
from .views import SlideDelete
from .views import SlideList
from .views import SlideUpdate
app_name = 'signage'
urlpatterns = [
url(r'^(?P<pk>\d+)/$', DisplayDetail.as_view(), name='display'),
url(r'^displays/$', DisplayList.as_view(), name='display_list'),
url(r'^displays/create/$', DisplayCreate.as_view(), name='display_create'),
url(r'^displays/delete/(?P<pk>\d+)/$', DisplayDelete.as_view(), name='display_delete'),
url(r'^displays/update/(?P<pk>\d+)/$', DisplayUpdate.as_view(), name='display_update'),
url(r'^slides/$', SlideList.as_view(), name='slide_list'),
url(r'^slides/create/$', SlideCreate.as_view(), name='slide_create'),
url(r'^slides/delete/(?P<pk>\d+)/$', SlideDelete.as_view(), name='slide_delete'),
url(r'^slides/update/(?P<pk>\d+)/$', SlideUpdate.as_view(), name='slide_update'),
]
<commit_msg>Refactor URL imports and paths<commit_after>
|
from django.conf.urls import url
from . import views
app_name = 'signage'
urlpatterns = [
url(r'^display/(?P<pk>\d+)/$', views.DisplayDetail.as_view(), name='display'),
url(r'^display/create/$', views.DisplayCreate.as_view(), name='display_create'),
url(r'^display/(?P<pk>\d+)/delete/$', views.DisplayDelete.as_view(), name='display_delete'),
url(r'^display/(?P<pk>\d+)/update/$', views.DisplayUpdate.as_view(), name='display_update'),
url(r'^displays/$', views.DisplayList.as_view(), name='display_list'),
url(r'^slide/create/$', views.SlideCreate.as_view(), name='slide_create'),
url(r'^slide/(?P<pk>\d+)/delete/$', views.SlideDelete.as_view(), name='slide_delete'),
url(r'^slide/(?P<pk>\d+)/update/$', views.SlideUpdate.as_view(), name='slide_update'),
url(r'^slides/$', views.SlideList.as_view(), name='slide_list'),
]
|
from django.conf.urls import url
from .views import DisplayCreate
from .views import DisplayDelete
from .views import DisplayDetail
from .views import DisplayList
from .views import DisplayUpdate
from .views import SlideCreate
from .views import SlideDelete
from .views import SlideList
from .views import SlideUpdate
app_name = 'signage'
urlpatterns = [
url(r'^(?P<pk>\d+)/$', DisplayDetail.as_view(), name='display'),
url(r'^displays/$', DisplayList.as_view(), name='display_list'),
url(r'^displays/create/$', DisplayCreate.as_view(), name='display_create'),
url(r'^displays/delete/(?P<pk>\d+)/$', DisplayDelete.as_view(), name='display_delete'),
url(r'^displays/update/(?P<pk>\d+)/$', DisplayUpdate.as_view(), name='display_update'),
url(r'^slides/$', SlideList.as_view(), name='slide_list'),
url(r'^slides/create/$', SlideCreate.as_view(), name='slide_create'),
url(r'^slides/delete/(?P<pk>\d+)/$', SlideDelete.as_view(), name='slide_delete'),
url(r'^slides/update/(?P<pk>\d+)/$', SlideUpdate.as_view(), name='slide_update'),
]
Refactor URL imports and pathsfrom django.conf.urls import url
from . import views
app_name = 'signage'
urlpatterns = [
url(r'^display/(?P<pk>\d+)/$', views.DisplayDetail.as_view(), name='display'),
url(r'^display/create/$', views.DisplayCreate.as_view(), name='display_create'),
url(r'^display/(?P<pk>\d+)/delete/$', views.DisplayDelete.as_view(), name='display_delete'),
url(r'^display/(?P<pk>\d+)/update/$', views.DisplayUpdate.as_view(), name='display_update'),
url(r'^displays/$', views.DisplayList.as_view(), name='display_list'),
url(r'^slide/create/$', views.SlideCreate.as_view(), name='slide_create'),
url(r'^slide/(?P<pk>\d+)/delete/$', views.SlideDelete.as_view(), name='slide_delete'),
url(r'^slide/(?P<pk>\d+)/update/$', views.SlideUpdate.as_view(), name='slide_update'),
url(r'^slides/$', views.SlideList.as_view(), name='slide_list'),
]
|
<commit_before>from django.conf.urls import url
from .views import DisplayCreate
from .views import DisplayDelete
from .views import DisplayDetail
from .views import DisplayList
from .views import DisplayUpdate
from .views import SlideCreate
from .views import SlideDelete
from .views import SlideList
from .views import SlideUpdate
app_name = 'signage'
urlpatterns = [
url(r'^(?P<pk>\d+)/$', DisplayDetail.as_view(), name='display'),
url(r'^displays/$', DisplayList.as_view(), name='display_list'),
url(r'^displays/create/$', DisplayCreate.as_view(), name='display_create'),
url(r'^displays/delete/(?P<pk>\d+)/$', DisplayDelete.as_view(), name='display_delete'),
url(r'^displays/update/(?P<pk>\d+)/$', DisplayUpdate.as_view(), name='display_update'),
url(r'^slides/$', SlideList.as_view(), name='slide_list'),
url(r'^slides/create/$', SlideCreate.as_view(), name='slide_create'),
url(r'^slides/delete/(?P<pk>\d+)/$', SlideDelete.as_view(), name='slide_delete'),
url(r'^slides/update/(?P<pk>\d+)/$', SlideUpdate.as_view(), name='slide_update'),
]
<commit_msg>Refactor URL imports and paths<commit_after>from django.conf.urls import url
from . import views
app_name = 'signage'
urlpatterns = [
url(r'^display/(?P<pk>\d+)/$', views.DisplayDetail.as_view(), name='display'),
url(r'^display/create/$', views.DisplayCreate.as_view(), name='display_create'),
url(r'^display/(?P<pk>\d+)/delete/$', views.DisplayDelete.as_view(), name='display_delete'),
url(r'^display/(?P<pk>\d+)/update/$', views.DisplayUpdate.as_view(), name='display_update'),
url(r'^displays/$', views.DisplayList.as_view(), name='display_list'),
url(r'^slide/create/$', views.SlideCreate.as_view(), name='slide_create'),
url(r'^slide/(?P<pk>\d+)/delete/$', views.SlideDelete.as_view(), name='slide_delete'),
url(r'^slide/(?P<pk>\d+)/update/$', views.SlideUpdate.as_view(), name='slide_update'),
url(r'^slides/$', views.SlideList.as_view(), name='slide_list'),
]
|
e42d34e2e3163488daff15c5b584d5f3757d162f
|
unit_test/memory_unit_test.py
|
unit_test/memory_unit_test.py
|
import memory
import head
# import write_heads
from keras import backend as K
number_of_memory_locations = 6
memory_vector_size = 3
memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
read_vector = head.reading(memory_t, weight_t)
print memory_t.shape
print weight_t.shape
print read_vector
|
from keras import backend as K
import theano.tensor as T
import theano
import memory
import head
#
# number_of_memory_locations = 6
# memory_vector_size = 3
#
# memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
#
# weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
#
# read_vector = head.reading(memory_t, weight_t)
#
# print memory_t.shape
# print weight_t.shape
# print read_vector
#
def logistic(x):
s = 1 / (1 + K.exp(x))
log = theano.function([x], s)
return log
# return s
x = [[0, 1], [-1, -2]]
print logistic(x)
|
Update code of NTM based on Keras.
|
Update code of NTM based on Keras.
|
Python
|
mit
|
SigmaQuan/NTM-Keras
|
import memory
import head
# import write_heads
from keras import backend as K
number_of_memory_locations = 6
memory_vector_size = 3
memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
read_vector = head.reading(memory_t, weight_t)
print memory_t.shape
print weight_t.shape
print read_vector
Update code of NTM based on Keras.
|
from keras import backend as K
import theano.tensor as T
import theano
import memory
import head
#
# number_of_memory_locations = 6
# memory_vector_size = 3
#
# memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
#
# weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
#
# read_vector = head.reading(memory_t, weight_t)
#
# print memory_t.shape
# print weight_t.shape
# print read_vector
#
def logistic(x):
s = 1 / (1 + K.exp(x))
log = theano.function([x], s)
return log
# return s
x = [[0, 1], [-1, -2]]
print logistic(x)
|
<commit_before>import memory
import head
# import write_heads
from keras import backend as K
number_of_memory_locations = 6
memory_vector_size = 3
memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
read_vector = head.reading(memory_t, weight_t)
print memory_t.shape
print weight_t.shape
print read_vector
<commit_msg>Update code of NTM based on Keras.<commit_after>
|
from keras import backend as K
import theano.tensor as T
import theano
import memory
import head
#
# number_of_memory_locations = 6
# memory_vector_size = 3
#
# memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
#
# weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
#
# read_vector = head.reading(memory_t, weight_t)
#
# print memory_t.shape
# print weight_t.shape
# print read_vector
#
def logistic(x):
s = 1 / (1 + K.exp(x))
log = theano.function([x], s)
return log
# return s
x = [[0, 1], [-1, -2]]
print logistic(x)
|
import memory
import head
# import write_heads
from keras import backend as K
number_of_memory_locations = 6
memory_vector_size = 3
memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
read_vector = head.reading(memory_t, weight_t)
print memory_t.shape
print weight_t.shape
print read_vector
Update code of NTM based on Keras.from keras import backend as K
import theano.tensor as T
import theano
import memory
import head
#
# number_of_memory_locations = 6
# memory_vector_size = 3
#
# memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
#
# weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
#
# read_vector = head.reading(memory_t, weight_t)
#
# print memory_t.shape
# print weight_t.shape
# print read_vector
#
def logistic(x):
s = 1 / (1 + K.exp(x))
log = theano.function([x], s)
return log
# return s
x = [[0, 1], [-1, -2]]
print logistic(x)
|
<commit_before>import memory
import head
# import write_heads
from keras import backend as K
number_of_memory_locations = 6
memory_vector_size = 3
memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
read_vector = head.reading(memory_t, weight_t)
print memory_t.shape
print weight_t.shape
print read_vector
<commit_msg>Update code of NTM based on Keras.<commit_after>from keras import backend as K
import theano.tensor as T
import theano
import memory
import head
#
# number_of_memory_locations = 6
# memory_vector_size = 3
#
# memory_t = memory.initial(number_of_memory_locations, memory_vector_size)
#
# weight_t = K.random_binomial((number_of_memory_locations, 1), 0.2)
#
# read_vector = head.reading(memory_t, weight_t)
#
# print memory_t.shape
# print weight_t.shape
# print read_vector
#
def logistic(x):
s = 1 / (1 + K.exp(x))
log = theano.function([x], s)
return log
# return s
x = [[0, 1], [-1, -2]]
print logistic(x)
|
55098fa6ce3a6d55849f08ddcfb95cc5c241abb5
|
examples/IPLoM_example.py
|
examples/IPLoM_example.py
|
# for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from IPLoM import *
sys.path.insert(0, '../pygraphc/evaluation')
from ExternalEvaluation import *
# set path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
|
from pygraphc.misc.IPLoM import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './result'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
# print evaluation result
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
|
Edit import module. pygraphc is now ready to be packaged
|
Edit import module. pygraphc is now ready to be packaged
|
Python
|
mit
|
studiawan/pygraphc
|
# for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from IPLoM import *
sys.path.insert(0, '../pygraphc/evaluation')
from ExternalEvaluation import *
# set path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
Edit import module. pygraphc is now ready to be packaged
|
from pygraphc.misc.IPLoM import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './result'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
# print evaluation result
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
|
<commit_before># for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from IPLoM import *
sys.path.insert(0, '../pygraphc/evaluation')
from ExternalEvaluation import *
# set path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
<commit_msg>Edit import module. pygraphc is now ready to be packaged<commit_after>
|
from pygraphc.misc.IPLoM import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './result'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
# print evaluation result
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
|
# for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from IPLoM import *
sys.path.insert(0, '../pygraphc/evaluation')
from ExternalEvaluation import *
# set path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
Edit import module. pygraphc is now ready to be packagedfrom pygraphc.misc.IPLoM import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './result'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
# print evaluation result
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
|
<commit_before># for local run, before pygraphc packaging
import sys
sys.path.insert(0, '../pygraphc/misc')
from IPLoM import *
sys.path.insert(0, '../pygraphc/evaluation')
from ExternalEvaluation import *
# set path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
<commit_msg>Edit import module. pygraphc is now ready to be packaged<commit_after>from pygraphc.misc.IPLoM import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'iplom-result-' + ip_address + '.txt'
OutputPath = './result'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# call IPLoM and get clusters
myparser = IPLoM(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# set cluster label to get evaluation metrics
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
# print evaluation result
print homogeneity_completeness_vmeasure
print ('The running time of IPLoM is', time)
|
2a3fc1b82e47d23e3de8820343ab7d39c72aa35b
|
luispedro/urls.py
|
luispedro/urls.py
|
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
(r'^admin/', include(admin.site.urls)),
)
|
Make this usable (at least testable)
|
Make this usable (at least testable)
|
Python
|
agpl-3.0
|
luispedro/django-gitcms,luispedro/django-gitcms
|
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
)
Make this usable (at least testable)
|
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
)
<commit_msg>Make this usable (at least testable)<commit_after>
|
from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
(r'^admin/', include(admin.site.urls)),
)
|
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
)
Make this usable (at least testable)from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
(r'^admin/', include(admin.site.urls)),
)
|
<commit_before>from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
)
<commit_msg>Make this usable (at least testable)<commit_after>from django.conf.urls.defaults import *
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^luispedro/', include('luispedro.foo.urls')),
(r'^admin/', include(admin.site.urls)),
)
|
e923a56f21fd6dc8d7e16005792d473788cb1925
|
markups/common.py
|
markups/common.py
|
# This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
# This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URLS = (
'file:///usr/share/javascript/mathjax/MathJax.js', # Debian libjs-mathjax
'file:///usr/share/mathjax/MathJax.js', # Arch Linux mathjax
)
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if not webenv:
for url in MATHJAX_LOCAL_URLS:
if os.path.exists(url[7:]): # strip file://
return url
return MATHJAX_WEB_URL
|
Add support for Arch Linux mathjax package
|
Add support for Arch Linux mathjax package
Fixes #4.
|
Python
|
bsd-3-clause
|
mitya57/pymarkups,retext-project/pymarkups
|
# This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
Add support for Arch Linux mathjax package
Fixes #4.
|
# This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URLS = (
'file:///usr/share/javascript/mathjax/MathJax.js', # Debian libjs-mathjax
'file:///usr/share/mathjax/MathJax.js', # Arch Linux mathjax
)
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if not webenv:
for url in MATHJAX_LOCAL_URLS:
if os.path.exists(url[7:]): # strip file://
return url
return MATHJAX_WEB_URL
|
<commit_before># This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
<commit_msg>Add support for Arch Linux mathjax package
Fixes #4.<commit_after>
|
# This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URLS = (
'file:///usr/share/javascript/mathjax/MathJax.js', # Debian libjs-mathjax
'file:///usr/share/mathjax/MathJax.js', # Arch Linux mathjax
)
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if not webenv:
for url in MATHJAX_LOCAL_URLS:
if os.path.exists(url[7:]): # strip file://
return url
return MATHJAX_WEB_URL
|
# This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
Add support for Arch Linux mathjax package
Fixes #4.# This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URLS = (
'file:///usr/share/javascript/mathjax/MathJax.js', # Debian libjs-mathjax
'file:///usr/share/mathjax/MathJax.js', # Arch Linux mathjax
)
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if not webenv:
for url in MATHJAX_LOCAL_URLS:
if os.path.exists(url[7:]): # strip file://
return url
return MATHJAX_WEB_URL
|
<commit_before># This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
<commit_msg>Add support for Arch Linux mathjax package
Fixes #4.<commit_after># This file is part of python-markups module
# License: 3-clause BSD, see LICENSE file
# Copyright: (C) Dmitry Shachnev, 2012-2018
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URLS = (
'file:///usr/share/javascript/mathjax/MathJax.js', # Debian libjs-mathjax
'file:///usr/share/mathjax/MathJax.js', # Arch Linux mathjax
)
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if not webenv:
for url in MATHJAX_LOCAL_URLS:
if os.path.exists(url[7:]): # strip file://
return url
return MATHJAX_WEB_URL
|
8fcdb1b07ba39befa178d66f7b214968d3de25d8
|
coupons/settings.py
|
coupons/settings.py
|
import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
|
import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
|
Correct typo in setting names
|
Correct typo in setting names
|
Python
|
bsd-3-clause
|
byteweaver/django-coupons,byteweaver/django-coupons
|
import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
Correct typo in setting names
|
import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
|
<commit_before>import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
<commit_msg>Correct typo in setting names<commit_after>
|
import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
|
import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
Correct typo in setting namesimport string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
|
<commit_before>import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPNS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPNS_CODE_CHARS', string.ascii_letters+string.digits)
<commit_msg>Correct typo in setting names<commit_after>import string
from django.conf import settings
COUPON_TYPES = getattr(settings, 'COUPONS_COUPON_TYPES', (
('monetary', 'Money based coupon'),
('percentage', 'Percentage discount'),
('virtual_currency', 'Virtual currency'),
))
CODE_LENGTH = getattr(settings, 'COUPONS_CODE_LENGTH', 15)
CODE_CHARS = getattr(settings, 'COUPONS_CODE_CHARS', string.ascii_letters+string.digits)
|
b3850c475e449c0c6182629aa7521f335e86b1e1
|
scrapy_local.py
|
scrapy_local.py
|
# Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'
|
# Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
Fix issue with scrapy local settings
|
Fix issue with scrapy local settings
|
Python
|
mit
|
comsaint/legco-watch,comsaint/legco-watch,comsaint/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,legco-watch/legco-watch,comsaint/legco-watch
|
# Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'Fix issue with scrapy local settings
|
# Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
<commit_before># Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'<commit_msg>Fix issue with scrapy local settings<commit_after>
|
# Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
# Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'Fix issue with scrapy local settings# Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
<commit_before># Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
# PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
# Use this for deploying to scrapyd, as it would be in stage/production
FILES_STORE = '/var/lib/scrapyd/files'<commit_msg>Fix issue with scrapy local settings<commit_after># Local dev settings for scrapy
# This is not the same
# Local config for testing
import os
# use this for running scrapy directly
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
FILES_STORE = os.path.join(PROJECT_ROOT, 'datafiles')
|
b1504dac6d33b4f0774cabceeb219653b9b6201f
|
ui.py
|
ui.py
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
 ______
 |    |
 O    |
 |    |
 |    |
/     |
      |
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
 ______
 |    |
 O    |
 |    |
 |    |
/     |
      |
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print("\n{}".format(table.table))
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
Change the way we get a clean, blank line before rendering letter bank
|
Change the way we get a clean, blank line before rendering letter bank
|
Python
|
mit
|
tml/python-hangman-2017-summer
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
 ______
 |    |
 O    |
 |    |
 |    |
/     |
      |
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
Change the way we get a clean, blank line before rendering letter bank
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
 ______
 |    |
 O    |
 |    |
 |    |
/     |
      |
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print("\n{}".format(table.table))
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
<commit_before>from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
<commit_msg>Change the way we get a clean, blank line before rendering letter bank<commit_after>
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print("\n{}".format(table.table))
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
Change the way we get a clean, blank line before rendering letter bankfrom terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print("\n{}".format(table.table))
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
<commit_before>from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print()
print(table.table)
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
<commit_msg>Change the way we get a clean, blank line before rendering letter bank<commit_after>from terminaltables import SingleTable
def render(object, **kw):
if object == 'gallows':
render_gallows(**kw)
if object == 'bank':
render_bank(**kw)
if object == 'game_state':
render_game_state(**kw)
def render_gallows(parts=0, **kw):
print("""
______
| |
O |
| |
| |
/ |
|
---------
""")
def render_bank(letters=[], **kw):
sz = 6 # Size of table
if not any(letters):
let = [' ']
else:
let = sorted(list(letters))
table = SingleTable([let[i:i + sz] for i in range(0, len(let), sz)],
'Incorrect Guesses')
table.inner_heading_row_border = False
table.inner_row_border = True
table.justify_columns = {idx: val for idx, val in
enumerate(['center'] * sz)}
print("\n{}".format(table.table))
def render_game_state(word="", found=[], **kw):
for letter in word:
if letter in found:
print(letter, end='')
else:
print(' _ ', end='')
|
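A minimal, self-contained sketch of the terminaltables pattern this record exercises (the letter values are illustrative, not taken from the commit):
from terminaltables import SingleTable
letters = sorted({'q', 'x', 'z'})                             # pretend incorrect guesses
rows = [letters[i:i + 6] for i in range(0, len(letters), 6)]  # wrap into rows of six
table = SingleTable(rows, 'Incorrect Guesses')
table.inner_heading_row_border = False                        # the data has no header row
print("\n{}".format(table.table))                             # leading blank line, then the rendered box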
cf2af4a006d2545bbe0ec9fc92d087d8ff6805f1
|
cah.py
|
cah.py
|
STA_F= "/home/ormiret/cah/statements.txt"
ANS_F= "/home/ormiret/cah/answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
|
STA_F= "statements.txt"
ANS_F= "answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
|
Fix path to statements and answers files.
|
Fix path to statements and answers files.
|
Python
|
mit
|
ormiret/cards-against-hackspace,ormiret/cards-against-hackspace
|
STA_F= "/home/ormiret/cah/statements.txt"
ANS_F= "/home/ormiret/cah/answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
Fix path to statements and answers files.
|
STA_F= "statements.txt"
ANS_F= "answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
|
<commit_before>STA_F= "/home/ormiret/cah/statements.txt"
ANS_F= "/home/ormiret/cah/answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
<commit_msg>Fix path to statements and answers files.<commit_after>
|
STA_F= "statements.txt"
ANS_F= "answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
|
STA_F= "/home/ormiret/cah/statements.txt"
ANS_F= "/home/ormiret/cah/answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
Fix path to statements and answers files.STA_F= "statements.txt"
ANS_F= "answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
|
<commit_before>STA_F= "/home/ormiret/cah/statements.txt"
ANS_F= "/home/ormiret/cah/answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
<commit_msg>Fix path to statements and answers files.<commit_after>STA_F= "statements.txt"
ANS_F= "answers.txt"
import random
def rand_line(filename):
with open(filename) as f:
lines = f.readlines()
return random.choice(lines).strip()
def statement():
return rand_line(STA_F)
def answer():
return rand_line(ANS_F)
def fill_statement():
statement = rand_line(STA_F)
if not "<blank>" in statement:
return statement + " " + rand_line(ANS_F)
while "<blank>" in statement:
statement = statement.replace("<blank>", rand_line(ANS_F), 1)
return statement
if __name__=="__main__":
print fill_statement()
|
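The fix above swaps absolute paths for bare filenames, which resolve against whatever the current working directory happens to be; a common, more robust alternative (a sketch, not part of the commit) anchors the files to the script's own directory:
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STA_F = os.path.join(BASE_DIR, "statements.txt")   # assumes the text files sit next to the script
ANS_F = os.path.join(BASE_DIR, "answers.txt")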
7dfd89b22c66eb4cfc38218b9430adc38e8ad073
|
oonib/__init__.py
|
oonib/__init__.py
|
"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
import random
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
|
"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
from random import SystemRandom
random = SystemRandom()
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
|
Use SystemRandom instead of insecure RNG
|
Use SystemRandom instead of insecure RNG
|
Python
|
bsd-2-clause
|
dstufft/ooni-backend,dstufft/ooni-backend
|
"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
import random
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
Use SystemRandom instead of insecure RNG
|
"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
from random import SystemRandom
random = SystemRandom()
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
|
<commit_before>"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
import random
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
<commit_msg>Use SystemRandom instead of insecure RNG<commit_after>
|
"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
from random import SystemRandom
random = SystemRandom()
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
|
"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
import random
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
Use SystemRandom instead of insecure RNG"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
from random import SystemRandom
random = SystemRandom()
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
|
<commit_before>"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
import random
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
<commit_msg>Use SystemRandom instead of insecure RNG<commit_after>"""
In here we shall keep track of all variables and objects that should be
instantiated only once and be common to pieces of GLBackend code.
"""
__version__ = '1.0.0'
__all__ = ['Storage', 'randomStr']
import string
from random import SystemRandom
random = SystemRandom()
class Storage(dict):
"""
A Storage object is like a dictionary except `obj.foo` can be used
in addition to `obj['foo']`.
>>> o = Storage(a=1)
>>> o.a
1
>>> o['a']
1
>>> o.a = 2
>>> o['a']
2
>>> del o.a
>>> o.a
"""
def __getattr__(self, key):
return self.get(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try:
del self[key]
except KeyError, k:
raise AttributeError(k)
def __repr__(self):
return '<Storage ' + dict.__repr__(self) + '>'
def __getstate__(self):
return dict(self)
def __setstate__(self, value):
self.update(value.items())
def randomStr(length, num=True):
"""
Returns a random a mixed lowercase, uppercase, alfanumerical (if num True)
string long length
"""
chars = string.ascii_lowercase + string.ascii_uppercase
if num:
chars += string.digits
return ''.join(random.choice(chars) for x in range(length))
|
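For context on the change above, a minimal comparison of the two generators (Python 3 syntax shown; on 3.6+ the secrets module is a further option):
import random
import string
from random import SystemRandom
chars = string.ascii_letters + string.digits
guessable = ''.join(random.choice(chars) for _ in range(16))       # Mersenne Twister: fast but predictable
secure = ''.join(SystemRandom().choice(chars) for _ in range(16))  # drawn from the OS entropy source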
09c4cacfd5aeb5740c2c741a74043938fd0d1b0f
|
tests/test_reporters.py
|
tests/test_reporters.py
|
import pytest
from seqeval.reporters import DictReporter
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
|
import pytest
from seqeval.reporters import DictReporter, StringReporter
class TestDictReporter:
def test_write_empty(self):
reporter = DictReporter()
reporter.write_blank()
assert reporter.report_dict == {}
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(self, rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
class TestStringReporter:
def test_write_empty(self):
reporter = StringReporter()
reporter.write_blank()
assert reporter.buffer == ['']
def test_write_header(self):
reporter = StringReporter()
report = reporter.write_header()
assert 'precision' in report
assert 'recall' in report
assert 'f1-score' in report
assert 'support' in report
def test_write(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
assert 'XXX' in reporter.buffer[0]
def test_report(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
report = reporter.report()
assert 'XXX' in report
assert 'precision' in report
|
Add test cases for reporters.py
|
Add test cases for reporters.py
|
Python
|
mit
|
chakki-works/seqeval,chakki-works/seqeval
|
import pytest
from seqeval.reporters import DictReporter
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
Add test cases for reporters.py
|
import pytest
from seqeval.reporters import DictReporter, StringReporter
class TestDictReporter:
def test_write_empty(self):
reporter = DictReporter()
reporter.write_blank()
assert reporter.report_dict == {}
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(self, rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
class TestStringReporter:
def test_write_empty(self):
reporter = StringReporter()
reporter.write_blank()
assert reporter.buffer == ['']
def test_write_header(self):
reporter = StringReporter()
report = reporter.write_header()
assert 'precision' in report
assert 'recall' in report
assert 'f1-score' in report
assert 'support' in report
def test_write(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
assert 'XXX' in reporter.buffer[0]
def test_report(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
report = reporter.report()
assert 'XXX' in report
assert 'precision' in report
|
<commit_before>import pytest
from seqeval.reporters import DictReporter
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
<commit_msg>Add test cases for reporters.py<commit_after>
|
import pytest
from seqeval.reporters import DictReporter, StringReporter
class TestDictReporter:
def test_write_empty(self):
reporter = DictReporter()
reporter.write_blank()
assert reporter.report_dict == {}
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(self, rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
class TestStringReporter:
def test_write_empty(self):
reporter = StringReporter()
reporter.write_blank()
assert reporter.buffer == ['']
def test_write_header(self):
reporter = StringReporter()
report = reporter.write_header()
assert 'precision' in report
assert 'recall' in report
assert 'f1-score' in report
assert 'support' in report
def test_write(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
assert 'XXX' in reporter.buffer[0]
def test_report(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
report = reporter.report()
assert 'XXX' in report
assert 'precision' in report
|
import pytest
from seqeval.reporters import DictReporter
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
Add test cases for reporters.pyimport pytest
from seqeval.reporters import DictReporter, StringReporter
class TestDictReporter:
def test_write_empty(self):
reporter = DictReporter()
reporter.write_blank()
assert reporter.report_dict == {}
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(self, rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
class TestStringReporter:
def test_write_empty(self):
reporter = StringReporter()
reporter.write_blank()
assert reporter.buffer == ['']
def test_write_header(self):
reporter = StringReporter()
report = reporter.write_header()
assert 'precision' in report
assert 'recall' in report
assert 'f1-score' in report
assert 'support' in report
def test_write(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
assert 'XXX' in reporter.buffer[0]
def test_report(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
report = reporter.report()
assert 'XXX' in report
assert 'precision' in report
|
<commit_before>import pytest
from seqeval.reporters import DictReporter
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
<commit_msg>Add test cases for reporters.py<commit_after>import pytest
from seqeval.reporters import DictReporter, StringReporter
class TestDictReporter:
def test_write_empty(self):
reporter = DictReporter()
reporter.write_blank()
assert reporter.report_dict == {}
@pytest.mark.parametrize(
'rows, expected',
[
([], {}),
(
[['PERSON', 0.82, 0.79, 0.81, 24]],
{
'PERSON': {
'precision': 0.82,
'recall': 0.79,
'f1-score': 0.81,
'support': 24
}
}
)
]
)
def test_dict_reporter_output(self, rows, expected):
reporter = DictReporter()
for row in rows:
reporter.write(*row)
assert reporter.report() == expected
class TestStringReporter:
def test_write_empty(self):
reporter = StringReporter()
reporter.write_blank()
assert reporter.buffer == ['']
def test_write_header(self):
reporter = StringReporter()
report = reporter.write_header()
assert 'precision' in report
assert 'recall' in report
assert 'f1-score' in report
assert 'support' in report
def test_write(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
assert 'XXX' in reporter.buffer[0]
def test_report(self):
reporter = StringReporter()
reporter.write('XXX', 0, 0, 0, 0)
report = reporter.report()
assert 'XXX' in report
assert 'precision' in report
|
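The refactor moves the parametrized test into a class, so self has to precede the injected arguments; the pattern reduced to its skeleton:
import pytest
class TestExample:
    @pytest.mark.parametrize('value, expected', [(1, 2), (3, 4)])
    def test_increment(self, value, expected):   # self first, then the parametrized names
        assert value + 1 == expected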
43c7be216422e66a6789ea115995373cfb017c65
|
daemail/__init__.py
|
daemail/__init__.py
|
from __future__ import unicode_literals
import platform
__version__ = '0.3.0'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
|
from __future__ import unicode_literals
import platform
__version__ = '0.3.0.dev1'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
|
Mark current code as a dev version
|
Mark current code as a dev version
|
Python
|
mit
|
jwodder/daemail
|
from __future__ import unicode_literals
import platform
__version__ = '0.3.0'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
Mark current code as a dev version
|
from __future__ import unicode_literals
import platform
__version__ = '0.3.0.dev1'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
|
<commit_before>from __future__ import unicode_literals
import platform
__version__ = '0.3.0'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
<commit_msg>Mark current code as a dev version<commit_after>
|
from __future__ import unicode_literals
import platform
__version__ = '0.3.0.dev1'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
|
from __future__ import unicode_literals
import platform
__version__ = '0.3.0'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
Mark current code as a dev versionfrom __future__ import unicode_literals
import platform
__version__ = '0.3.0.dev1'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
|
<commit_before>from __future__ import unicode_literals
import platform
__version__ = '0.3.0'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
<commit_msg>Mark current code as a dev version<commit_after>from __future__ import unicode_literals
import platform
__version__ = '0.3.0.dev1'
USER_AGENT = 'daemail {} ({} {})'.format(
__version__, platform.python_implementation(), platform.python_version()
)
|
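PEP 440 treats the .dev1 suffix as a development release that sorts before the final version; a quick check, assuming the third-party packaging library is available:
from packaging.version import Version
assert Version('0.3.0.dev1') < Version('0.3.0')   # dev releases order before the release proper
assert Version('0.3.0.dev1').is_devrelease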
4f1f0b9d1643a6ff4934070472973e60b1eb6c26
|
tests/rules_tests/isValid_tests/NongrammarEntitiesTest.py
|
tests/rules_tests/isValid_tests/NongrammarEntitiesTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from .grammar import *
class NongrammarEntitiesTest(TestCase):
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule, Nonterminal as _N
from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException
from .grammar import *
class Invalid(_N):
pass
class NongrammarEntitiesTest(TestCase):
def test_invalidTerminal(self):
class tmp(Rule):
rules = [([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFrom(self):
class tmp(Rule):
rules = [(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminal(self):
class tmp(Rule):
rules = [([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFrom(self):
class tmp(Rule):
rules = [(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
if __name__ == '__main__':
main()
|
Add tests of terminals and nonterminals that are not in grammar
|
Add tests of terminals and nonterminals that are not in grammar
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from .grammar import *
class NongrammarEntitiesTest(TestCase):
pass
if __name__ == '__main__':
main()Add tests of terminals and nonterminals that are not in grammar
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule, Nonterminal as _N
from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException
from .grammar import *
class Invalid(_N):
pass
class NongrammarEntitiesTest(TestCase):
def test_invalidTerminal(self):
class tmp(Rule):
rules = [([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFrom(self):
class tmp(Rule):
rules = [(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminal(self):
class tmp(Rule):
rules = [([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFrom(self):
class tmp(Rule):
rules = [(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from .grammar import *
class NongrammarEntitiesTest(TestCase):
pass
if __name__ == '__main__':
main()<commit_msg>Add tests of terminals and nonterminals that are not in grammar<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule, Nonterminal as _N
from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException
from .grammar import *
class Invalid(_N):
pass
class NongrammarEntitiesTest(TestCase):
def test_invalidTerminal(self):
class tmp(Rule):
rules = [([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFrom(self):
class tmp(Rule):
rules = [(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminal(self):
class tmp(Rule):
rules = [([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFrom(self):
class tmp(Rule):
rules = [(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from .grammar import *
class NongrammarEntitiesTest(TestCase):
pass
if __name__ == '__main__':
main()Add tests of terminals and nonterminals that are not in grammar#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule, Nonterminal as _N
from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException
from .grammar import *
class Invalid(_N):
pass
class NongrammarEntitiesTest(TestCase):
def test_invalidTerminal(self):
class tmp(Rule):
rules = [([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFrom(self):
class tmp(Rule):
rules = [(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminal(self):
class tmp(Rule):
rules = [([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFrom(self):
class tmp(Rule):
rules = [(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
from .grammar import *
class NongrammarEntitiesTest(TestCase):
pass
if __name__ == '__main__':
main()<commit_msg>Add tests of terminals and nonterminals that are not in grammar<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule, Nonterminal as _N
from grammpy.exceptions import TerminalDoesNotExistsException, NonterminalDoesNotExistsException
from .grammar import *
class Invalid(_N):
pass
class NongrammarEntitiesTest(TestCase):
def test_invalidTerminal(self):
class tmp(Rule):
rules = [([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFrom(self):
class tmp(Rule):
rules = [(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [5, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidTerminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['asdf', NFifth], [2, NFirst])]
with self.assertRaises(TerminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminal(self):
class tmp(Rule):
rules = [([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFrom(self):
class tmp(Rule):
rules = [(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
([NFifth], [2, Invalid])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
def test_invalidNonterminalFromMultiple(self):
class tmp(Rule):
rules = [([TSecond, 'b', TThird], ['c', 2]),
(['a', Invalid], [2, NFirst])]
with self.assertRaises(NonterminalDoesNotExistsException):
tmp.validate(grammar)
if __name__ == '__main__':
main()
|
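Every added case relies on the same assertRaises context-manager idiom; stripped of the grammar machinery it looks like this (the exception type and lookup are generic placeholders, not grammpy's):
from unittest import TestCase
class ValidationTest(TestCase):
    def test_rejects_unknown_symbol(self):
        with self.assertRaises(KeyError):        # test fails unless KeyError is raised inside the block
            {}['symbol-not-in-grammar']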
e435592d64dbd4f75a7cc9d1ac8bb17ab4177a2b
|
erpnext/patches/v4_2/default_website_style.py
|
erpnext/patches/v4_2/default_website_style.py
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
Fix default website style patch (reload doc)
|
[minor] Fix default website style patch (reload doc)
|
Python
|
agpl-3.0
|
gangadharkadam/saloon_erp,hatwar/buyback-erpnext,gangadharkadam/v6_erp,indictranstech/Das_Erpnext,gangadharkadam/vlinkerp,shft117/SteckerApp,sheafferusa/erpnext,mahabuber/erpnext,hernad/erpnext,suyashphadtare/gd-erp,gangadharkadam/letzerp,indictranstech/internal-erpnext,indictranstech/buyback-erp,4commerce-technologies-AG/erpnext,indictranstech/buyback-erp,shitolepriya/test-erp,rohitwaghchaure/New_Theme_Erp,indictranstech/trufil-erpnext,gangadharkadam/v5_erp,mahabuber/erpnext,indictranstech/Das_Erpnext,suyashphadtare/vestasi-erp-jan-end,gangadhar-kadam/verve_test_erp,SPKian/Testing2,mbauskar/omnitech-demo-erpnext,gsnbng/erpnext,hernad/erpnext,sheafferusa/erpnext,mbauskar/phrerp,indictranstech/trufil-erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/huntercamp_erpnext,netfirms/erpnext,MartinEnder/erpnext-de,gangadhar-kadam/latestchurcherp,hatwar/Das_erpnext,indictranstech/fbd_erpnext,gangadharkadam/saloon_erp,hanselke/erpnext-1,njmube/erpnext,Tejal011089/trufil-erpnext,fuhongliang/erpnext,gangadhar-kadam/helpdesk-erpnext,SPKian/Testing2,pombredanne/erpnext,sagar30051991/ozsmart-erp,gangadharkadam/verveerp,gangadharkadam/v4_erp,gangadhar-kadam/verve_test_erp,mbauskar/phrerp,indictranstech/focal-erpnext,rohitwaghchaure/GenieManager-erpnext,rohitwaghchaure/New_Theme_Erp,mbauskar/Das_Erpnext,mbauskar/helpdesk-erpnext,suyashphadtare/sajil-erp,gangadhar-kadam/verve_test_erp,susuchina/ERPNEXT,gangadharkadam/verveerp,ShashaQin/erpnext,netfirms/erpnext,njmube/erpnext,SPKian/Testing,mbauskar/Das_Erpnext,mbauskar/sapphire-erpnext,gangadhar-kadam/verve_erp,suyashphadtare/vestasi-erp-1,gangadharkadam/vlinkerp,Tejal011089/paypal_erpnext,indictranstech/reciphergroup-erpnext,treejames/erpnext,rohitwaghchaure/erpnext_smart,gangadharkadam/v4_erp,tmimori/erpnext,suyashphadtare/vestasi-erp-final,mahabuber/erpnext,indictranstech/osmosis-erpnext,hatwar/focal-erpnext,hatwar/buyback-erpnext,treejames/erpnext,suyashphadtare/vestasi-update-erp,gangadharkadam/contributionerp,geekroot/erpnext,shitolepriya/test-erp,Tejal011089/trufil-erpnext,ThiagoGarciaAlves/erpnext,rohitwaghchaure/erpnext_smart,hernad/erpnext,ThiagoGarciaAlves/erpnext,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/erpnext-receipher,BhupeshGupta/erpnext,gangadhar-kadam/helpdesk-erpnext,anandpdoshi/erpnext,meisterkleister/erpnext,suyashphadtare/gd-erp,mbauskar/omnitech-erpnext,susuchina/ERPNEXT,pombredanne/erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/verve_test_erp,Tejal011089/trufil-erpnext,indictranstech/erpnext,gsnbng/erpnext,indictranstech/trufil-erpnext,indictranstech/vestasi-erpnext,Suninus/erpnext,mbauskar/alec_frappe5_erpnext,dieface/erpnext,indictranstech/focal-erpnext,aruizramon/alec_erpnext,gangadharkadam/saloon_erp_install,sagar30051991/ozsmart-erp,indictranstech/vestasi-erpnext,mbauskar/alec_frappe5_erpnext,indictranstech/biggift-erpnext,gmarke/erpnext,gangadhar-kadam/latestchurcherp,gangadhar-kadam/verve-erp,ShashaQin/erpnext,suyashphadtare/gd-erp,suyashphadtare/sajil-final-erp,geekroot/erpnext,Drooids/erpnext,geekroot/erpnext,gangadharkadam/saloon_erp_install,indictranstech/focal-erpnext,suyashphadtare/sajil-erp,gangadharkadam/verveerp,saurabh6790/test-erp,hanselke/erpnext-1,hatwar/Das_erpnext,gangadharkadam/letzerp,gangadhar-kadam/helpdesk-erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,mbauskar/helpdesk-erpnext,Tejal011089/digitales_erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,gmarke/erpnext,gangadharkadam/v4_erp,MartinEnder/erpnext-de,mbauskar/omnitech-erpnext,Tejal011089/fbd_erpnext,saurabh6790/test-erp,in
dictranstech/buyback-erp,mbauskar/phrerp,indictranstech/phrerp,indictranstech/trufil-erpnext,Suninus/erpnext,gsnbng/erpnext,rohitwaghchaure/digitales_erpnext,Drooids/erpnext,fuhongliang/erpnext,sheafferusa/erpnext,gangadharkadam/saloon_erp,mbauskar/sapphire-erpnext,Tejal011089/huntercamp_erpnext,indictranstech/internal-erpnext,mbauskar/sapphire-erpnext,indictranstech/fbd_erpnext,hanselke/erpnext-1,sheafferusa/erpnext,gangadharkadam/v6_erp,MartinEnder/erpnext-de,4commerce-technologies-AG/erpnext,indictranstech/biggift-erpnext,mbauskar/helpdesk-erpnext,anandpdoshi/erpnext,gangadharkadam/vlinkerp,indictranstech/reciphergroup-erpnext,gangadhar-kadam/verve-erp,mbauskar/helpdesk-erpnext,geekroot/erpnext,gangadharkadam/v5_erp,Tejal011089/digitales_erpnext,indictranstech/tele-erpnext,ThiagoGarciaAlves/erpnext,suyashphadtare/vestasi-erp-final,Tejal011089/huntercamp_erpnext,indictranstech/reciphergroup-erpnext,suyashphadtare/vestasi-erp-jan-end,indictranstech/biggift-erpnext,rohitwaghchaure/GenieManager-erpnext,mbauskar/omnitech-erpnext,suyashphadtare/vestasi-erp-final,indictranstech/vestasi-erpnext,saurabh6790/test-erp,treejames/erpnext,pawaranand/phrerp,gangadhar-kadam/verve_erp,mbauskar/sapphire-erpnext,gangadharkadam/letzerp,aruizramon/alec_erpnext,suyashphadtare/vestasi-update-erp,gangadharkadam/contributionerp,fuhongliang/erpnext,netfirms/erpnext,indictranstech/osmosis-erpnext,SPKian/Testing,meisterkleister/erpnext,gangadhar-kadam/verve_live_erp,tmimori/erpnext,hatwar/focal-erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/tele-erpnext,gangadharkadam/saloon_erp_install,indictranstech/fbd_erpnext,pawaranand/phrerp,gangadharkadam/v6_erp,suyashphadtare/sajil-final-erp,indictranstech/buyback-erp,treejames/erpnext,suyashphadtare/test,mbauskar/alec_frappe5_erpnext,suyashphadtare/vestasi-erp-jan-end,SPKian/Testing,tmimori/erpnext,gangadharkadam/v4_erp,suyashphadtare/vestasi-erp-1,indictranstech/phrerp,suyashphadtare/sajil-final-erp,netfirms/erpnext,gmarke/erpnext,BhupeshGupta/erpnext,indictranstech/tele-erpnext,Tejal011089/osmosis_erpnext,hatwar/focal-erpnext,Tejal011089/digitales_erpnext,suyashphadtare/vestasi-erp-jan-end,MartinEnder/erpnext-de,Suninus/erpnext,indictranstech/Das_Erpnext,Tejal011089/osmosis_erpnext,rohitwaghchaure/GenieManager-erpnext,gangadhar-kadam/verve_live_erp,rohitwaghchaure/erpnext-receipher,Tejal011089/digitales_erpnext,shitolepriya/test-erp,shft117/SteckerApp,indictranstech/osmosis-erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/verve_live_erp,hatwar/buyback-erpnext,shft117/SteckerApp,tmimori/erpnext,dieface/erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/v5_erp,indictranstech/phrerp,gangadhar-kadam/verve_erp,indictranstech/internal-erpnext,hatwar/buyback-erpnext,gangadharkadam/verveerp,njmube/erpnext,rohitwaghchaure/erpnext-receipher,Aptitudetech/ERPNext,aruizramon/alec_erpnext,gsnbng/erpnext,susuchina/ERPNEXT,gangadharkadam/v5_erp,hanselke/erpnext-1,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/digitales_erpnext,Drooids/erpnext,susuchina/ERPNEXT,njmube/erpnext,mbauskar/omnitech-demo-erpnext,gangadharkadam/v6_erp,dieface/erpnext,gangadharkadam/contributionerp,ShashaQin/erpnext,saurabh6790/test-erp,suyashphadtare/vestasi-erp-1,SPKian/Testing,suyashphadtare/test,rohitwaghchaure/New_Theme_Erp,hernad/erpnext,rohitwaghchaure/New_Theme_Erp,meisterkleister/erpnext,hatwar/Das_erpnext,4commerce-technologies-AG/erpnext,Tejal011089/osmosis_erpnext,mahabuber/erpnext,suyashphadtare/vestasi-update-erp,pombredanne/erpnext,Tejal011089/trufil-erpnext,gangadharkadam/saloon_
erp_install,SPKian/Testing2,mbauskar/Das_Erpnext,indictranstech/reciphergroup-erpnext,ThiagoGarciaAlves/erpnext,gangadharkadam/contributionerp,suyashphadtare/gd-erp,shitolepriya/test-erp,gangadhar-kadam/verve_erp,gangadhar-kadam/helpdesk-erpnext,Tejal011089/fbd_erpnext,pombredanne/erpnext,dieface/erpnext,hatwar/focal-erpnext,ShashaQin/erpnext,indictranstech/vestasi-erpnext,gangadhar-kadam/latestchurcherp,gangadhar-kadam/verve-erp,gangadhar-kadam/latestchurcherp,indictranstech/biggift-erpnext,fuhongliang/erpnext,suyashphadtare/sajil-erp,shft117/SteckerApp,indictranstech/erpnext,mbauskar/phrerp,BhupeshGupta/erpnext,indictranstech/tele-erpnext,SPKian/Testing2,aruizramon/alec_erpnext,indictranstech/fbd_erpnext,rohitwaghchaure/erpnext_smart,hatwar/Das_erpnext,pawaranand/phrerp,pawaranand/phrerp,Tejal011089/osmosis_erpnext,sagar30051991/ozsmart-erp,anandpdoshi/erpnext,suyashphadtare/test,indictranstech/phrerp,indictranstech/erpnext,Tejal011089/paypal_erpnext,indictranstech/internal-erpnext,gangadharkadam/saloon_erp,gmarke/erpnext,Tejal011089/paypal_erpnext,sagar30051991/ozsmart-erp,BhupeshGupta/erpnext,Drooids/erpnext,Suninus/erpnext,gangadharkadam/vlinkerp,gangadhar-kadam/verve_live_erp,meisterkleister/erpnext,mbauskar/omnitech-erpnext,gangadharkadam/letzerp,mbauskar/omnitech-demo-erpnext,rohitwaghchaure/erpnext-receipher,indictranstech/focal-erpnext,Tejal011089/paypal_erpnext,Tejal011089/huntercamp_erpnext,indictranstech/erpnext
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
[minor] Fix default website style patch (reload doc)
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
<commit_before>import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
<commit_msg>[minor] Fix default website style patch (reload doc)<commit_after>
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
[minor] Fix default website style patch (reload doc)import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
<commit_before>import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
<commit_msg>[minor] Fix default website style patch (reload doc)<commit_after>import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
0378572237df9c3a4bfa7a5a7009fdd664e527e5
|
wagtail/wagtailadmin/templatetags/gravatar.py
|
wagtail/wagtailadmin/templatetags/gravatar.py
|
# place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "blank"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
|
# place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "mm"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
|
Make mystery man the default Gravatar image
|
Make mystery man the default Gravatar image
|
Python
|
bsd-3-clause
|
gasman/wagtail,rsalmaso/wagtail,nealtodd/wagtail,timorieber/wagtail,jnns/wagtail,wagtail/wagtail,gasman/wagtail,thenewguy/wagtail,mikedingjan/wagtail,wagtail/wagtail,gasman/wagtail,torchbox/wagtail,iansprice/wagtail,zerolab/wagtail,takeflight/wagtail,kaedroho/wagtail,Toshakins/wagtail,jnns/wagtail,nimasmi/wagtail,kaedroho/wagtail,mixxorz/wagtail,nealtodd/wagtail,thenewguy/wagtail,takeflight/wagtail,Toshakins/wagtail,timorieber/wagtail,mikedingjan/wagtail,zerolab/wagtail,timorieber/wagtail,FlipperPA/wagtail,nealtodd/wagtail,wagtail/wagtail,mixxorz/wagtail,iansprice/wagtail,rsalmaso/wagtail,rsalmaso/wagtail,kaedroho/wagtail,Toshakins/wagtail,nealtodd/wagtail,FlipperPA/wagtail,takeflight/wagtail,rsalmaso/wagtail,mixxorz/wagtail,wagtail/wagtail,thenewguy/wagtail,zerolab/wagtail,rsalmaso/wagtail,gasman/wagtail,kaedroho/wagtail,thenewguy/wagtail,takeflight/wagtail,kaedroho/wagtail,torchbox/wagtail,wagtail/wagtail,jnns/wagtail,torchbox/wagtail,thenewguy/wagtail,nimasmi/wagtail,mikedingjan/wagtail,FlipperPA/wagtail,nimasmi/wagtail,zerolab/wagtail,jnns/wagtail,Toshakins/wagtail,iansprice/wagtail,FlipperPA/wagtail,iansprice/wagtail,torchbox/wagtail,mixxorz/wagtail,timorieber/wagtail,mikedingjan/wagtail,gasman/wagtail,zerolab/wagtail,nimasmi/wagtail,mixxorz/wagtail
|
# place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "blank"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
Make mystery man the default Gravatar image
|
# place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "mm"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
|
<commit_before># place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "blank"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
<commit_msg>Make mystery man the default Gravatar image<commit_after>
|
# place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "mm"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
|
# place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "blank"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
Make mystery man the default Gravatar image
# place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "mm"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
|
<commit_before># place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "blank"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
<commit_msg>Make mystery man the default Gravatar image<commit_after># place inside a 'templatetags' directory inside the top level of a Django app (not project, must be inside an app)
# at the top of your page template include this:
# {% load gravatar %}
# and to use the url do this:
# <img src="{% gravatar_url 'someone@somewhere.com' %}">
# or
# <img src="{% gravatar_url sometemplatevariable %}">
# just make sure to update the "default" image path below
from __future__ import absolute_import, unicode_literals
import hashlib
from django import template
from django.utils.six.moves.urllib.parse import urlencode
register = template.Library()
class GravatarUrlNode(template.Node):
def __init__(self, email, size=50):
self.email = template.Variable(email)
self.size = size
def render(self, context):
try:
email = self.email.resolve(context)
except template.VariableDoesNotExist:
return ''
default = "mm"
size = int(self.size) * 2 # requested at retina size by default and scaled down at point of use with css
gravatar_url = "//www.gravatar.com/avatar/{hash}?{params}".format(
hash=hashlib.md5(email.lower().encode('utf-8')).hexdigest(),
params=urlencode({'s': size, 'd': default})
)
return gravatar_url
@register.tag
def gravatar_url(parser, token):
bits = token.split_contents()
return GravatarUrlNode(*bits[1:])
|
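For reference, a minimal standalone sketch of the URL the template tag above ends up building. The sample address comes from the record's own usage comment; the size value and the use of the stdlib urllib.parse (instead of the six shim in the record) are assumptions for illustration only.

import hashlib
from urllib.parse import urlencode

email = 'someone@somewhere.com'  # sample address taken from the tag's usage comment
digest = hashlib.md5(email.lower().encode('utf-8')).hexdigest()
url = '//www.gravatar.com/avatar/{hash}?{params}'.format(
    hash=digest,
    params=urlencode({'s': 100, 'd': 'mm'}),  # 100 = 50 * 2, the retina sizing used by the tag
)
print(url)  # //www.gravatar.com/avatar/<md5 hexdigest>?s=100&d=mm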
9ff346834a39605a707d66d4a2c6e3dc20dcdd78
|
markov_chain.py
|
markov_chain.py
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def add_text_collection(self, text_col, separator=" "):
""" Adds a collection of text strings to the markov chain """
for line in text_col:
if line not in ["", "\n", None]:
self.add_text(line, separator)
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
Add Markov Chain representation class
|
Add Markov Chain representation class
|
Python
|
mit
|
iluxonchik/lyricist
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
Add Markov Chain representation class
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def add_text_collection(self, text_col, separator=" "):
""" Adds a collection of text strings to the markov chain """
for line in text_col:
if line not in ["", "\n", None]:
self.add_text(line, separator)
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
<commit_before>from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
<commit_msg>Add Markov Chain representation class<commit_after>
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def add_text_collection(self, text_col, separator=" "):
""" Adds a collection of text strings to the markov chain """
for line in text_col:
if line not in ["", "\n", None]:
self.add_text(line, separator)
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
Add Markov Chain representation class
from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def add_text_collection(self, text_col, separator=" "):
""" Adds a collection of text strings to the markov chain """
for line in text_col:
if line not in ["", "\n", None]:
self.add_text(line, separator)
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
<commit_before>from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
<commit_msg>Add Markov Chain representation class<commit_after>from random import choice
class MarkovChain(object):
""" An interface for signle-word states Markov Chains """
def __init__(self, text=None):
self._states_map = {}
if text is not None:
self.add_text(text)
def add_text(self, text, separator=" "):
""" Adds text to the markov chain """
word_list = text.split(separator)
for i in range(0, len(word_list)-1):
self._states_map.setdefault(word_list[i], []).append(word_list[i+1])
return self
def add_text_collection(self, text_col, separator=" "):
""" Adds a collection of text strings to the markov chain """
for line in text_col:
if line not in ["", "\n", None]:
self.add_text(line, separator)
def get_word(self, key):
""" Returns a word from Markov Chain associated with the key """
values = self._states_map.get(key)
return choice(values) if values is not None else None
|
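A minimal usage sketch for the MarkovChain class above; the sample sentences are assumptions for illustration only.

chain = MarkovChain('the quick brown fox jumps over the lazy dog')
chain.add_text_collection(['the cat sat', '', 'the dog ran'])  # empty entries are skipped
print(chain.get_word('the'))      # randomly one of: 'quick', 'lazy', 'cat', 'dog'
print(chain.get_word('unknown'))  # None, since that key was never added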
28afc9f6f81e1e7ed94e2ec561ef321bff8bb56a
|
sphinxdoc/urls.py
|
sphinxdoc/urls.py
|
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>(([\w-]+)/)+)$',
'documentation',
name='doc-detail',
),
)
|
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
Support more general documentation path names.
|
Support more general documentation path names.
|
Python
|
bsd-3-clause
|
30loops/django-sphinxdoc,kamni/django-sphinxdoc
|
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>(([\w-]+)/)+)$',
'documentation',
name='doc-detail',
),
)
Support more general documentation path names.
|
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
<commit_before># encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>(([\w-]+)/)+)$',
'documentation',
name='doc-detail',
),
)
<commit_msg>Support more general documentation path names.<commit_after>
|
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>(([\w-]+)/)+)$',
'documentation',
name='doc-detail',
),
)
Support more general documentation path names.
# encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
<commit_before># encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>(([\w-]+)/)+)$',
'documentation',
name='doc-detail',
),
)
<commit_msg>Support more general documentation path names.<commit_after># encoding: utf-8
"""
URL conf for django-sphinxdoc.
"""
from django.conf.urls.defaults import patterns, url
from django.views.generic import list_detail
from sphinxdoc import models
from sphinxdoc.views import ProjectSearchView
project_info = {
'queryset': models.Project.objects.all().order_by('name'),
'template_object_name': 'project',
}
urlpatterns = patterns('sphinxdoc.views',
url(
r'^$',
list_detail.object_list,
project_info,
),
url(
r'^(?P<slug>[\w-]+)/search/$',
ProjectSearchView(),
name='doc-search',
),
url(
r'^(?P<slug>[\w-]+)/_images/(?P<path>.*)$',
'images',
),
url(
r'^(?P<slug>[\w-]+)/_source/(?P<path>.*)$',
'source',
),
url(
r'^(?P<slug>[\w-]+)/_objects/$',
'objects_inventory',
name='objects-inv',
),
url(
r'^(?P<slug>[\w-]+)/$',
'documentation',
{'path': ''},
name='doc-index',
),
url(
r'^(?P<slug>[\w-]+)/(?P<path>.+)/$',
'documentation',
name='doc-detail',
),
)
|
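An illustrative check of what the relaxed doc-detail pattern buys: the old path group only matched slash-terminated [\w-] segments, so page names containing dots never resolved. The sample path is an assumption for illustration only.

import re

old_pattern = re.compile(r'^(?P<slug>[\w-]+)/(?P<path>(([\w-]+)/)+)$')
new_pattern = re.compile(r'^(?P<slug>[\w-]+)/(?P<path>.+)/$')

sample = 'myproject/api/module.submodule/'
print(bool(old_pattern.match(sample)))  # False, '.' is not allowed by [\w-]
print(bool(new_pattern.match(sample)))  # True, '.+' accepts it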
c7fef59a7e8c473a568b1aad7f30229eb71013dd
|
src/azure/cli/commands/resourcegroup.py
|
src/azure/cli/commands/resourcegroup.py
|
from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
|
from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_mgmt_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_mgmt_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
|
Fix build break (renamed function)
|
Fix build break (renamed function)
|
Python
|
mit
|
BurtBiel/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,BurtBiel/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli
|
from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
Fix build break (renamed function)
|
from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_mgmt_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_mgmt_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
|
<commit_before>from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
<commit_msg>Fix build break (renamed function)<commit_after>
|
from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_mgmt_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_mgmt_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
|
from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
Fix build break (renamed function)
from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_mgmt_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_mgmt_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
|
<commit_before>from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
<commit_msg>Fix build break (renamed function)<commit_after>from msrest import Serializer
from ..commands import command, description
from ._command_creation import get_mgmt_service_client
@command('resource group list')
@description('List resource groups')
# TODO: waiting on Python Azure SDK bug fixes
# @option('--tag-name -g <tagName>', L('the resource group's tag name'))
# @option('--tag-value -g <tagValue>', L('the resource group's tag value'))
# @option('--top -g <number>', L('Top N resource groups to retrieve'))
def list_groups(args, unexpected): #pylint: disable=unused-argument
from azure.mgmt.resource.resources import ResourceManagementClient, \
ResourceManagementClientConfiguration
from azure.mgmt.resource.resources.models import ResourceGroup, ResourceGroupFilter
rmc = get_mgmt_service_client(ResourceManagementClient, ResourceManagementClientConfiguration)
# TODO: waiting on Python Azure SDK bug fixes
#group_filter = ResourceGroupFilter(args.get('tag-name'), args.get('tag-value'))
#groups = rmc.resource_groups.list(filter=None, top=args.get('top'))
groups = rmc.resource_groups.list()
serializable = Serializer().serialize_data(groups, "[ResourceGroup]")
return serializable
|
f456f4e44a95ff36516395f41bb3a35afa33a90e
|
examples/pub_sub_more_on_filter.py
|
examples/pub_sub_more_on_filter.py
|
import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handlers={'b': log_b})
time.sleep(2)
ns.shutdown()
|
import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handler={'b': log_b})
time.sleep(2)
ns.shutdown()
|
Fix example with wrong `handler` parameter name
|
Fix example with wrong `handler` parameter name
|
Python
|
apache-2.0
|
opensistemas-hub/osbrain
|
import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handlers={'b': log_b})
time.sleep(2)
ns.shutdown()
Fix example with wrong `handler` parameter name
|
import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handler={'b': log_b})
time.sleep(2)
ns.shutdown()
|
<commit_before>import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handlers={'b': log_b})
time.sleep(2)
ns.shutdown()
<commit_msg>Fix example with wrong `handler` parameter name<commit_after>
|
import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handler={'b': log_b})
time.sleep(2)
ns.shutdown()
|
import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handlers={'b': log_b})
time.sleep(2)
ns.shutdown()
Fix example with wrong `handler` parameter name
import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handler={'b': log_b})
time.sleep(2)
ns.shutdown()
|
<commit_before>import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handlers={'b': log_b})
time.sleep(2)
ns.shutdown()
<commit_msg>Fix example with wrong `handler` parameter name<commit_after>import time
from osbrain import run_agent
from osbrain import run_nameserver
def log_a(agent, message):
agent.log_info('Log a: %s' % message)
def log_b(agent, message):
agent.log_info('Log b: %s' % message)
def send_messages(agent):
agent.send('main', 'Apple', topic='a')
agent.send('main', 'Banana', topic='b')
if __name__ == '__main__':
# System deployment
ns = run_nameserver()
alice = run_agent('Alice')
bob = run_agent('Bob')
# System configuration
addr = alice.bind('PUB', alias='main')
alice.each(0.5, send_messages)
bob.connect(addr, alias='listener', handler={'a': log_a})
time.sleep(2)
bob.unsubscribe('listener', 'a')
bob.subscribe('listener', handler={'b': log_b})
time.sleep(2)
ns.shutdown()
|
dbce975bcb348e0f878f39557d911e99ba08294c
|
corehq/apps/hqcase/management/commands/ptop_reindexer_v2.py
|
corehq/apps/hqcase/management/commands/ptop_reindexer_v2.py
|
from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
|
from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.case_search import get_couch_case_search_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
'case-search': get_couch_case_search_reindexer
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
|
Enable reindexing with v2 reindexer
|
Enable reindexing with v2 reindexer
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
|
from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
Enable reindexing with v2 reindexer
|
from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.case_search import get_couch_case_search_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
'case-search': get_couch_case_search_reindexer
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
|
<commit_before>from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
<commit_msg>Enable reindexing with v2 reindexer<commit_after>
|
from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.case_search import get_couch_case_search_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
'case-search': get_couch_case_search_reindexer
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
|
from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
Enable reindexing with v2 reindexer
from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.case_search import get_couch_case_search_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
'case-search': get_couch_case_search_reindexer
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
|
<commit_before>from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
<commit_msg>Enable reindexing with v2 reindexer<commit_after>from django.core.management import BaseCommand, CommandError
from corehq.pillows.case import get_couch_case_reindexer, get_sql_case_reindexer
from corehq.pillows.case_search import get_couch_case_search_reindexer
from corehq.pillows.xform import get_couch_form_reindexer, get_sql_form_reindexer
class Command(BaseCommand):
args = 'index'
help = 'Reindex a pillowtop index'
def handle(self, index, *args, **options):
reindex_fns = {
'case': get_couch_case_reindexer,
'form': get_couch_form_reindexer,
'sql-case': get_sql_case_reindexer,
'sql-form': get_sql_form_reindexer,
'case-search': get_couch_case_search_reindexer
}
if index not in reindex_fns:
raise CommandError('Supported indices to reindex are: {}'.format(','.join(reindex_fns.keys())))
reindexer = reindex_fns[index]()
reindexer.reindex()
|
cc5028b58736ca7e06083d733d2e0a16a7ec8696
|
src/vrun/cli.py
|
src/vrun/cli.py
|
from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', [])
PATH = binpath + os.pathsep + PATH
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
|
from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', '')
if PATH:
PATH = binpath + os.pathsep + PATH
else:
PATH = binpath
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
|
Add protection against empty PATH
|
Add protection against empty PATH
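For illustration only (the directory name below is hypothetical), a short sketch of the failure modes the guard addresses: a missing PATH previously fell back to a list default, and an empty PATH would gain a trailing separator.
import os

binpath = "/opt/venv/bin"  # hypothetical virtualenv bin directory

# Old behaviour: the fallback default was a list, so the string concatenation
# raised TypeError when PATH was unset; an empty PATH also left a trailing
# separator, which many shells treat as "also search the current directory".
unsafe = os.environ.get("PATH", [])
# binpath + os.pathsep + unsafe  # TypeError if PATH is unset

# Guarded construction, mirroring the fix:
path = os.environ.get("PATH", "")
path = binpath + os.pathsep + path if path else binpath
print(path)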
|
Python
|
isc
|
bertjwregeer/vrun
|
from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', [])
PATH = binpath + os.pathsep + PATH
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
Add protection against empty PATH
|
from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', '')
if PATH:
PATH = binpath + os.pathsep + PATH
else:
PATH = binpath
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
|
<commit_before>from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', [])
PATH = binpath + os.pathsep + PATH
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
<commit_msg>Add protection against empty PATH<commit_after>
|
from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', '')
if PATH:
PATH = binpath + os.pathsep + PATH
else:
PATH = binpath
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
|
from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', [])
PATH = binpath + os.pathsep + PATH
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
Add protection against empty PATH
from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', '')
if PATH:
PATH = binpath + os.pathsep + PATH
else:
PATH = binpath
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
|
<commit_before>from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', [])
PATH = binpath + os.pathsep + PATH
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
<commit_msg>Add protection against empty PATH<commit_after>from __future__ import print_function
import os
import sys
def main():
prefix = sys.prefix
binpath = os.path.join(prefix, 'bin')
PATH = os.environ.get('PATH', '')
if PATH:
PATH = binpath + os.pathsep + PATH
else:
PATH = binpath
os.putenv('PATH', PATH)
os.putenv('VRUN_ACTIVATED', '1')
os.putenv('VIRTUAL_ENV', sys.prefix)
newargv = sys.argv[1:]
if not newargv:
print('vrun requires the program to execute as an argument.', file=sys.stderr)
print('Example: ./venv/bin/vrun /bin/bash', file=sys.stderr)
sys.exit(-1)
execbin = newargv[0]
if os.sep not in execbin:
execbin = os.path.join(binpath, execbin)
if not os.path.exists(execbin):
print('vrun requires that the target executable exists.', file=sys.stderr)
print('Unable to find: {}'.format(execbin), file=sys.stderr)
sys.exit(-1)
try:
# Execute the actual executable...
os.execv(execbin, newargv)
except Exception as e:
print('vrun was unable to execute the target executable.', file=sys.stderr)
print('Executable: {}'.format(execbin), file=sys.stderr)
print('Exception as follows: {}'.format(e), file=sys.stderr)
|
064b4c80018d9b76c2bedc010ab45c8b9ea7faa3
|
netlib/utils.py
|
netlib/utils.py
|
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\r\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
|
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
|
Make cleanBin escape carriage returns.
|
Make cleanBin escape carriage returns.
We get confusing output on terminals if we leave \r unescaped.
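As a rough illustration (the strings are made up), a bare carriage return makes a terminal redraw the line from column zero, hiding what came before it:
import sys

# The text before "\r" is overwritten when the cursor returns to column 0,
# so the start of the line silently disappears on most terminals.
sys.stdout.write("GET /secret HTTP/1.1\rlooks harmless\n")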
|
Python
|
mit
|
Kriechi/mitmproxy,cortesi/mitmproxy,dwfreed/mitmproxy,ParthGanatra/mitmproxy,dwfreed/mitmproxy,fimad/mitmproxy,cortesi/mitmproxy,cortesi/mitmproxy,laurmurclar/mitmproxy,MatthewShao/mitmproxy,Kriechi/mitmproxy,xaxa89/mitmproxy,xaxa89/mitmproxy,mhils/mitmproxy,ddworken/mitmproxy,dwfreed/mitmproxy,tdickers/mitmproxy,laurmurclar/mitmproxy,Kriechi/mitmproxy,mosajjal/mitmproxy,mhils/mitmproxy,mosajjal/mitmproxy,ujjwal96/mitmproxy,ddworken/mitmproxy,tdickers/mitmproxy,gzzhanghao/mitmproxy,jvillacorta/mitmproxy,mitmproxy/mitmproxy,ParthGanatra/mitmproxy,mhils/mitmproxy,ddworken/mitmproxy,laurmurclar/mitmproxy,cortesi/mitmproxy,ujjwal96/mitmproxy,jvillacorta/mitmproxy,xaxa89/mitmproxy,mhils/mitmproxy,ujjwal96/mitmproxy,pombredanne/netlib,pombredanne/netlib,mitmproxy/netlib,Kriechi/mitmproxy,zlorb/mitmproxy,tdickers/mitmproxy,dufferzafar/mitmproxy,gzzhanghao/mitmproxy,ikoz/mitmproxy,ikoz/mitmproxy,ikoz/mitmproxy,akihikodaki/netlib,vhaupert/mitmproxy,dufferzafar/mitmproxy,mitmproxy/mitmproxy,MatthewShao/mitmproxy,mitmproxy/mitmproxy,ParthGanatra/mitmproxy,mhils/mitmproxy,ikoz/mitmproxy,Kriechi/netlib,Kriechi/netlib,mitmproxy/mitmproxy,gzzhanghao/mitmproxy,mitmproxy/mitmproxy,vhaupert/mitmproxy,akihikodaki/netlib,ddworken/mitmproxy,mosajjal/mitmproxy,StevenVanAcker/mitmproxy,laurmurclar/mitmproxy,gzzhanghao/mitmproxy,ParthGanatra/mitmproxy,zlorb/mitmproxy,MatthewShao/mitmproxy,ujjwal96/mitmproxy,dufferzafar/mitmproxy,xaxa89/mitmproxy,fimad/mitmproxy,jvillacorta/mitmproxy,MatthewShao/mitmproxy,vhaupert/mitmproxy,StevenVanAcker/mitmproxy,zlorb/mitmproxy,StevenVanAcker/mitmproxy,dufferzafar/mitmproxy,vhaupert/mitmproxy,jvillacorta/mitmproxy,StevenVanAcker/mitmproxy,fimad/mitmproxy,zlorb/mitmproxy,mosajjal/mitmproxy,fimad/mitmproxy,tdickers/mitmproxy,dwfreed/mitmproxy
|
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\r\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
Make cleanBin escape carriage returns.
We get confusing output on terminals if we leave \r unescaped.
|
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
|
<commit_before>
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\r\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
<commit_msg>Make cleanBin escape carriage returns.
We get confusing output on terminals if we leave \r unescaped.<commit_after>
|
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
|
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\r\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
Make cleanBin escape carriage returns.
We get confusing output on terminals if we leave \r unescaped.
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
|
<commit_before>
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\r\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
<commit_msg>Make cleanBin escape carriage returns.
We get confusing output on terminals if we leave \r unescaped.<commit_after>
def cleanBin(s, fixspacing=False):
"""
Cleans binary data to make it safe to display. If fixspacing is True,
tabs, newlines and so forth will be maintained, if not, they will be
replaced with a placeholder.
"""
parts = []
for i in s:
o = ord(i)
if (o > 31 and o < 127):
parts.append(i)
elif i in "\n\t" and not fixspacing:
parts.append(i)
else:
parts.append(".")
return "".join(parts)
def hexdump(s):
"""
Returns a set of tuples:
(offset, hex, str)
"""
parts = []
for i in range(0, len(s), 16):
o = "%.10x"%i
part = s[i:i+16]
x = " ".join("%.2x"%ord(i) for i in part)
if len(part) < 16:
x += " "
x += " ".join(" " for i in range(16 - len(part)))
parts.append(
(o, x, cleanBin(part, True))
)
return parts
|
28ee80f7f544d06daa9cab51e082fd74cc6e4619
|
dotfiledir/bin/runmutt.py
|
dotfiledir/bin/runmutt.py
|
#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 300 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
|
#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 60 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
|
Make offlineimap sync every minute
|
Make offlineimap sync every minute
|
Python
|
mit
|
dahlbaek/Ubuntu-dotfiles
|
#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 300 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
Make offlineimap sync every minute
|
#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 60 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
|
<commit_before>#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 300 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
<commit_msg>Make offlineimap sync every minute<commit_after>
|
#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 60 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
|
#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 300 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
Make offlineimap sync every minute
#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 60 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
|
<commit_before>#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 300 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
<commit_msg>Make offlineimap sync every minute<commit_after>#!/usr/bin/env python3
import subprocess
import threading
import time
import os
# Sync accounts asynchronously, but wait for all syncs to finish
def offlineimap():
AAU = subprocess.Popen(['offlineimap', '-a AAU'], stderr = AAUlog)
AU = subprocess.Popen(['offlineimap', '-a AU'], stderr = AUlog)
AAU.communicate()
AU.communicate()
# Sync every wait_time seconds, and when mutt closes
def autosync():
while not mutt_has_closed:
offlineimap()
for i in range(wait_time):
if not mutt_has_closed:
time.sleep(1)
else:
offlineimap()
break
wait_time = 60 # Seconds to wait between syncs
mutt_has_closed = False
imap_thread = threading.Thread(target=autosync)
# Open log files, start autosync, start mutt. When Mutt closes, wait for autosync to finish.
with open(os.path.expanduser('~/.config/offlineimap/AAU.log'),'w') as AAUlog, open(os.path.expanduser('~/.config/offlineimap/AU.log'),'w') as AUlog:
imap_thread.start()
subprocess.call('mutt')
mutt_has_closed = True
print('Synchronizing mailboxes. This may take a while.')
imap_thread.join()
|
0dfea440014b4e1701fd42a20c45f4d8992c00bb
|
misp_modules/modules/import_mod/stiximport.py
|
misp_modules/modules/import_mod/stiximport.py
|
import json
import re
import base64
import hashlib
import tempfile
import os
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
tfile = tempfile.NamedTemporaryFile(mode="w", prefix="STIX", delete=False)
tfile.write(package)
tfile.close()
pkg = stix.load_stix(tfile.name)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
os.unlink(tfile.name)
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
|
import json
import re
import base64
import hashlib
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
pkg = stix.load_stix(package)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
|
Use SpooledTemp, not NamedTemp file
|
Use SpooledTemp, not NamedTemp file
|
Python
|
agpl-3.0
|
VirusTotal/misp-modules,MISP/misp-modules,amuehlem/misp-modules,MISP/misp-modules,VirusTotal/misp-modules,Rafiot/misp-modules,amuehlem/misp-modules,MISP/misp-modules,Rafiot/misp-modules,Rafiot/misp-modules,VirusTotal/misp-modules,amuehlem/misp-modules
|
import json
import re
import base64
import hashlib
import tempfile
import os
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
tfile = tempfile.NamedTemporaryFile(mode="w", prefix="STIX", delete=False)
tfile.write(package)
tfile.close()
pkg = stix.load_stix(tfile.name)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
os.unlink(tfile.name)
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
Use SpooledTemp, not NamedTemp file
|
import json
import re
import base64
import hashlib
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
pkg = stix.load_stix(package)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
|
<commit_before>import json
import re
import base64
import hashlib
import tempfile
import os
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
tfile = tempfile.NamedTemporaryFile(mode="w", prefix="STIX", delete=False)
tfile.write(package)
tfile.close()
pkg = stix.load_stix(tfile.name)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
os.unlink(tfile.name)
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
<commit_msg>Use SpooledTemp, not NamedTemp file<commit_after>
|
import json
import re
import base64
import hashlib
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
pkg = stix.load_stix(package)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
|
import json
import re
import base64
import hashlib
import tempfile
import os
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
tfile = tempfile.NamedTemporaryFile(mode="w", prefix="STIX", delete=False)
tfile.write(package)
tfile.close()
pkg = stix.load_stix(tfile.name)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
os.unlink(tfile.name)
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
Use SpooledTemp, not NamedTemp file
import json
import re
import base64
import hashlib
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
pkg = stix.load_stix(package)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
|
<commit_before>import json
import re
import base64
import hashlib
import tempfile
import os
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
tfile = tempfile.NamedTemporaryFile(mode="w", prefix="STIX", delete=False)
tfile.write(package)
tfile.close()
pkg = stix.load_stix(tfile.name)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
os.unlink(tfile.name)
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
<commit_msg>Use SpooledTemp, not NamedTemp file<commit_after>import json
import re
import base64
import hashlib
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
pkg = stix.load_stix(package)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
|
153025aaa585e70d09509248ab18b214194759ae
|
tasks/static.py
|
tasks/static.py
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c compass.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c compass.rb")
except KeyboardInterrupt:
pass
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c config.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c config.rb")
except KeyboardInterrupt:
pass
|
Deal with the compass.rb -> config.rb change
|
Deal with the compass.rb -> config.rb change
|
Python
|
apache-2.0
|
techtonik/warehouse,techtonik/warehouse
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c compass.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c compass.rb")
except KeyboardInterrupt:
pass
Deal with the compass.rb -> config.rb change
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c config.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c config.rb")
except KeyboardInterrupt:
pass
|
<commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c compass.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c compass.rb")
except KeyboardInterrupt:
pass
<commit_msg>Deal with the compass.rb -> config.rb change<commit_after>
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c config.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c config.rb")
except KeyboardInterrupt:
pass
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c compass.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c compass.rb")
except KeyboardInterrupt:
pass
Deal with the compass.rb -> config.rb change
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c config.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c config.rb")
except KeyboardInterrupt:
pass
|
<commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c compass.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c compass.rb")
except KeyboardInterrupt:
pass
<commit_msg>Deal with the compass.rb -> config.rb change<commit_after># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shutil
import invoke
@invoke.task
def build():
# Build our CSS files
invoke.run("compass compile -c config.rb --force")
@invoke.task
def watch():
try:
# Watch With Compass
invoke.run("compass watch -c config.rb")
except KeyboardInterrupt:
pass
|
78f8ce5e046f2abaa8cef60c6980257713eff9d7
|
server.py
|
server.py
|
from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while True:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
|
from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while 1:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
|
Replace `while True` with `while 1` for more web scale sauce.
|
Replace `while True` with `while 1` for more web scale sauce.
In Python 2.x, True is not a keyword, so it can be reassigned. Compiler replaces `while 1` loop with a single jump, so it is faster by 10%.
http://www.reddit.com/r/Python/comments/ppote/how_would_you_explain_a_performance_gap_between/
|
Python
|
agpl-3.0
|
mitsuhiko/mangodb
|
from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while True:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
Replace `while True` with `while 1` for more web scale sauce.
In Python 2.x, True is not a keyword, so it can be reassigned. Compiler replaces `while 1` loop with a single jump, so it is faster by 10%.
http://www.reddit.com/r/Python/comments/ppote/how_would_you_explain_a_performance_gap_between/
|
from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while 1:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
|
<commit_before>from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while True:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
<commit_msg>Replace `while True` with `while 1` for more web scale sauce.
In Python 2.x, True is not a keyword, so it can be reassigned. Compiler replaces `while 1` loop with a single jump, so it is faster by 10%.
http://www.reddit.com/r/Python/comments/ppote/how_would_you_explain_a_performance_gap_between/<commit_after>
|
from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while 1:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
|
from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while True:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
Replace `while True` with `while 1` for more web scale sauce.
In Python 2.x, True is not a keyword, so it can be reassigned. Compiler replaces `while 1` loop with a single jump, so it is faster by 10%.
http://www.reddit.com/r/Python/comments/ppote/how_would_you_explain_a_performance_gap_between/from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while 1:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
|
<commit_before>from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while True:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
<commit_msg>Replace `while True` with `while 1` for more web scale sauce.
In Python 2.x, True is not a keyword, so it can be reassigned. Compiler replaces `while 1` loop with a single jump, so it is faster by 10%.
http://www.reddit.com/r/Python/comments/ppote/how_would_you_explain_a_performance_gap_between/<commit_after>from gevent.server import StreamServer
import os
def mangodb(socket, address):
socket.sendall('HELLO\r\n')
client = socket.makefile()
output = open('/dev/null', 'w')
while 1:
line = client.readline()
if not line:
break
cmd_bits = line.split(' ', 1)
cmd = cmd_bits[0]
if cmd == 'BYE':
break
if len(cmd_bits) > 1:
output.write(cmd_bits[1])
if os.environ.get('MANGODB_DURABLE', False):
output.flush()
os.fsync(output.fileno())
client.write('OK' + os.urandom(1024) + '\r\n')
client.flush()
if __name__ == '__main__':
server = StreamServer(('0.0.0.0', 27017), mangodb)
print ('Starting MangoDB on port 27017')
server.serve_forever()
|
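The commit message for this record claims that, under Python 2, `while 1` compiles to a bare jump while `while True` re-evaluates the name `True` on every pass, since `True` is not a keyword there and can be rebound. A minimal sketch for checking that claim with the standard-library `dis` module, assuming a Python 2 interpreter — under Python 3 the two forms compile identically because `True` is a keyword constant:

# Compare the bytecode emitted for the two loop headers (Python 2.x assumed).
import dis

def loop_true():
    while True:   # emits a LOAD_GLOBAL of 'True' plus a conditional jump each pass
        break

def loop_one():
    while 1:      # peephole-optimized: the constant test is dropped entirely
        break

dis.dis(loop_true)   # disassembly shows the per-iteration truth test
dis.dis(loop_one)    # disassembly shows the loop body with no test

Whether that difference amounts to the 10% quoted in the message is not something the sketch verifies; it only makes the bytecode difference visible.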
f1ed7dd603ace84b1b8015c2d7d57515d9de3947
|
src/detector.py
|
src/detector.py
|
#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 50, 10, 50, 6, 10)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
|
#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 10, np.array([]), 10, 20, 6, 20)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
|
Fix error in RaspberryPi environment <numpy type error>.
|
Fix error in RaspberryPi environment <numpy type error>.
|
Python
|
apache-2.0
|
Jarrey/BotEyePi,Jarrey/BotEyePi
|
#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 50, 10, 50, 6, 10)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
Fix error in RaspberryPi environment <numpy type error>.
|
#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 10, np.array([]), 10, 20, 6, 20)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
|
<commit_before>#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 50, 10, 50, 6, 10)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
<commit_msg>Fix error in RaspberryPi environment <numpy type error>.<commit_after>
|
#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 10, np.array([]), 10, 20, 6, 20)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
|
#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 50, 10, 50, 6, 10)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
Fix error in RaspberryPi environment <numpy type error>.#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 10, np.array([]), 10, 20, 6, 20)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
|
<commit_before>#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 50, 10, 50, 6, 10)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
<commit_msg>Fix error in RaspberryPi environment <numpy type error>.<commit_after>#!/usr/bin/python
from sys import argv
import numpy as np
import cv2
import cv2.cv as cv
def detectCircle(imagePath):
image = cv2.imread(imagePath)
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
gray = cv2.Canny(gray, 32, 2)
cv2.imwrite("canny.jpg", gray)
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, 1, 10, np.array([]), 10, 20, 6, 20)
if circles is not None:
circles = np.uint16(np.around(circles))
gray = cv2.cvtColor(gray, cv2.COLOR_GRAY2BGR)
for i in circles[0,:]:
# draw the outer circle
cv2.circle(gray,(i[0],i[1]),i[2],(0,255,0),2)
# draw the center of the circle
cv2.circle(gray,(i[0],i[1]),2,(0,0,255),3)
cv2.imwrite('circled.jpg', gray)
return len(circles[0])
if __name__ == '__main__':
if len(argv) < 2:
exit(1)
print detectCircle(argv[1])
|
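The fix in this record works because the fifth positional argument of `cv2.HoughCircles` is the optional output array `circles`: in the old call the values intended as `param1`, `param2`, `minRadius` and `maxRadius` were each shifted one slot to the left, and the plain integer that landed in the `circles` position is what produced the numpy type error on the Raspberry Pi. Passing `np.array([])` restores the intended mapping. A sketch of an arguably clearer alternative, assuming the OpenCV 2.x Python bindings accept these parameter names as keywords (not verified against that exact version):

# Same call with the tuning values passed by keyword, so no placeholder array is needed.
circles = cv2.HoughCircles(gray, cv.CV_HOUGH_GRADIENT, dp=1, minDist=10,
                           param1=10, param2=20, minRadius=6, maxRadius=20)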
1b7e4ae56a5e56823f7639ab6940d63ed11f18ae
|
dataproperty/logger/_logger.py
|
dataproperty/logger/_logger.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
LOGBOOK_INSTALLED = True
except ImportError:
logger = NullLogger()
LOGBOOK_INSTALLED = False
def set_logger(is_enable):
if not LOGBOOK_INSTALLED:
return
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
if not LOGBOOK_INSTALLED:
return
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
|
Fix logger functions failures when an optional package not installed
|
Fix logger functions failures when an optional package not installed
|
Python
|
mit
|
thombashi/DataProperty
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
Fix logger functions failures when an optional package not installed
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
LOGBOOK_INSTALLED = True
except ImportError:
logger = NullLogger()
LOGBOOK_INSTALLED = False
def set_logger(is_enable):
if not LOGBOOK_INSTALLED:
return
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
if not LOGBOOK_INSTALLED:
return
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
|
<commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
<commit_msg>Fix logger functions failures when an optional package not installed<commit_after>
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
LOGBOOK_INSTALLED = True
except ImportError:
logger = NullLogger()
LOGBOOK_INSTALLED = False
def set_logger(is_enable):
if not LOGBOOK_INSTALLED:
return
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
if not LOGBOOK_INSTALLED:
return
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
Fix logger functions failures when an optional package not installed# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
LOGBOOK_INSTALLED = True
except ImportError:
logger = NullLogger()
LOGBOOK_INSTALLED = False
def set_logger(is_enable):
if not LOGBOOK_INSTALLED:
return
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
if not LOGBOOK_INSTALLED:
return
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
|
<commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
<commit_msg>Fix logger functions failures when an optional package not installed<commit_after># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from ._null_logger import NullLogger
try:
import logbook
logger = logbook.Logger("DataProperty")
logger.disable()
LOGBOOK_INSTALLED = True
except ImportError:
logger = NullLogger()
LOGBOOK_INSTALLED = False
def set_logger(is_enable):
if not LOGBOOK_INSTALLED:
return
if is_enable:
logger.enable()
else:
logger.disable()
def set_log_level(log_level):
"""
Set logging level of this module. The module using
`logbook <https://logbook.readthedocs.io/en/stable/>`__ module for logging.
:param int log_level:
One of the log level of the
`logbook <https://logbook.readthedocs.io/en/stable/api/base.html>`__.
Disabled logging if the ``log_level`` is ``logbook.NOTSET``.
:raises LookupError: If ``log_level`` is an invalid value.
"""
if not LOGBOOK_INSTALLED:
return
# validate log level
logbook.get_level_name(log_level)
if log_level == logger.level:
return
if log_level == logbook.NOTSET:
set_logger(is_enable=False)
else:
set_logger(is_enable=True)
logger.level = log_level
|
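The `LOGBOOK_INSTALLED` flag in the record above is a standard guard for an optional dependency: the import either succeeds and enables the real backend, or fails and leaves a flag that lets the public functions return early instead of referencing the missing `logbook` module (which would otherwise raise NameError in `set_log_level`). A stripped-down sketch of the same pattern, using hypothetical names rather than the DataProperty API and mirroring the `enable()`/`disable()` calls used in the record:

# Optional-dependency guard: degrade to a silent no-op when logbook is absent.
try:
    import logbook
    LOGBOOK_INSTALLED = True
except ImportError:
    LOGBOOK_INSTALLED = False

def set_verbose(enabled):
    if not LOGBOOK_INSTALLED:
        return                      # nothing to configure without the backend
    logger = logbook.Logger("example")
    if enabled:
        logger.enable()
    else:
        logger.disable()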
efe30ee01d3b1eb46cd7d986beba09ec47a51e14
|
app/api/cruds/weekday_crud.py
|
app/api/cruds/weekday_crud.py
|
from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return Weekday(weekday=weekday)
except ValidationError as e:
return Weekday(weekday=None, errors=get_errors(e))
|
from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return CreateWeekday(weekday=weekday)
except ValidationError as e:
return CreateWeekday(weekday=None, errors=get_errors(e))
|
Fix errors on create weekday
|
Fix errors on create weekday
|
Python
|
mit
|
teamtaverna/core
|
from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return Weekday(weekday=weekday)
except ValidationError as e:
return Weekday(weekday=None, errors=get_errors(e))
Fix errors on create weekday
|
from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return CreateWeekday(weekday=weekday)
except ValidationError as e:
return CreateWeekday(weekday=None, errors=get_errors(e))
|
<commit_before>from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return Weekday(weekday=weekday)
except ValidationError as e:
return Weekday(weekday=None, errors=get_errors(e))
<commit_msg>Fix errors on create weekday<commit_after>
|
from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return CreateWeekday(weekday=weekday)
except ValidationError as e:
return CreateWeekday(weekday=None, errors=get_errors(e))
|
from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return Weekday(weekday=weekday)
except ValidationError as e:
return Weekday(weekday=None, errors=get_errors(e))
Fix errors on create weekdayfrom django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return CreateWeekday(weekday=weekday)
except ValidationError as e:
return CreateWeekday(weekday=None, errors=get_errors(e))
|
<commit_before>from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return Weekday(weekday=weekday)
except ValidationError as e:
return Weekday(weekday=None, errors=get_errors(e))
<commit_msg>Fix errors on create weekday<commit_after>from django.core.exceptions import ValidationError
import graphene
from graphene_django import DjangoObjectType
from app.timetables.models import Weekday
from .utils import get_errors
class WeekdayNode(DjangoObjectType):
original_id = graphene.Int()
class Meta:
model = Weekday
filter_fields = {
'name': ['icontains']
}
filter_order_by = ['name', '-name']
interfaces = (graphene.relay.Node,)
def resolve_original_id(self, args, context, info):
return self.id
class CreateWeekday(graphene.relay.ClientIDMutation):
class Input:
name = graphene.String(required=True)
weekday = graphene.Field(WeekdayNode)
errors = graphene.List(graphene.String)
@classmethod
def mutate_and_get_payload(cls, input, context, info):
try:
weekday = Weekday()
weekday.name = input.get('name')
weekday.full_clean()
weekday.save()
return CreateWeekday(weekday=weekday)
except ValidationError as e:
return CreateWeekday(weekday=None, errors=get_errors(e))
|
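The fix in this last record comes down to what `mutate_and_get_payload` hands back: the payload must be an instance of the mutation class, because the declared output fields (`weekday`, `errors`) live on `CreateWeekday`, and constructing the Django model as `Weekday(weekday=...)` instead raises TypeError for an unexpected keyword argument. The corrected returns, copied from the record rather than runnable on their own, are simply:

# Payload is the mutation class itself, carrying the declared output fields.
return CreateWeekday(weekday=weekday)
# ...and on validation failure:
return CreateWeekday(weekday=None, errors=get_errors(e))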