commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9c9757278b38ad9846752c32ca7153b3802d137f
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayesian_changepoint_detection',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
|
#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayescd',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
|
Rename project since someone else created the pypi package
|
Rename project since someone else created the pypi package
|
Python
|
mit
|
hildensia/bayesian_changepoint_detection
|
#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayesian_changepoint_detection',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
Rename project since someone else created the pypi package
|
#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayescd',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
|
<commit_before>#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayesian_changepoint_detection',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
<commit_msg>Rename project since someone else created the pypi package<commit_after>
|
#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayescd',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
|
#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayesian_changepoint_detection',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
Rename project since someone else created the pypi package#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayescd',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
|
<commit_before>#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayesian_changepoint_detection',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
<commit_msg>Rename project since someone else created the pypi package<commit_after>#!/usr/bin/env python2
from setuptools import setup
import bayesian_changepoint_detection
setup(
name='bayescd',
version=bayesian_changepoint_detection.__version__,
description='Some Bayesian changepoint detection algorithms',
author='Johannes Kulick',
author_email='mail@johanneskulick.net',
url='http://github.com/hildensia/bayesian_changepoint_detection',
packages=['bayesian_changepoint_detection'],
install_requires=['scipy', 'numpy', 'decorator'],
extras_require={
'dev': ['pytest'],
'multivariate': ['scipy>=1.6.0'],
'plot': ['matplotlib'],
}
)
|
264214426051617d47d94a74801c635ff6b428e3
|
setup.py
|
setup.py
|
from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
|
from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
|
Add Framework::Pytest to list of classifiers
|
Add Framework::Pytest to list of classifiers
|
Python
|
mit
|
pytest-dev/pytest-faulthandler
|
from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
Add Framework::Pytest to list of classifiers
|
from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
|
<commit_before>from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
<commit_msg>Add Framework::Pytest to list of classifiers<commit_after>
|
from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
|
from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
Add Framework::Pytest to list of classifiersfrom setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
|
<commit_before>from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
<commit_msg>Add Framework::Pytest to list of classifiers<commit_after>from setuptools import setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='pytest-faulthandler',
version='1.3.0',
py_modules=['pytest_faulthandler'],
url='https://github.com/pytest-dev/pytest-faulthandler',
license='MIT',
install_requires=['pytest>=2.6'],
test_requires=['pytest-mock>=0.6'],
author='Bruno Oliveira',
author_email='nicoddemus@gmail.com',
description='py.test plugin that activates the fault handler module for tests',
long_description=long_description,
extras_require={
':python_version=="2.6" or python_version=="2.7"': ['faulthandler'],
},
entry_points={
'pytest11': ['pytest_faulthandler = pytest_faulthandler'],
},
keywords='pytest faulthandler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Testing',
]
)
|
6d03ab8ee75732778607cd6f575715a3aea0358a
|
setup.py
|
setup.py
|
import amazon
from setuptools import setup, find_packages
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
|
import amazon
from setuptools import setup, find_packages
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
|
Debug travis to pypi deploy
|
Debug travis to pypi deploy
|
Python
|
apache-2.0
|
yoavaviram/python-amazon-simple-product-api
|
import amazon
from setuptools import setup, find_packages
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
Debug travis to pypi deploy
|
import amazon
from setuptools import setup, find_packages
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
|
<commit_before>import amazon
from setuptools import setup, find_packages
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
<commit_msg>Debug travis to pypi deploy<commit_after>
|
import amazon
from setuptools import setup, find_packages
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
|
import amazon
from setuptools import setup, find_packages
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
Debug travis to pypi deployimport amazon
from setuptools import setup, find_packages
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
|
<commit_before>import amazon
from setuptools import setup, find_packages
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
<commit_msg>Debug travis to pypi deploy<commit_after>import amazon
from setuptools import setup, find_packages
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
setup(name='python-amazon-simple-product-api',
version=amazon.__version__,
description="A simple Python wrapper for the Amazon.com Product Advertising API",
long_description=long_description,
# http://pypi.python.org/pypi?:action=list_classifiers
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
],
keywords='amazon, product advertising, api',
author='Yoav Aviram',
author_email='yoav.aviram@gmail.com',
url='https://github.com/yoavaviram/python-amazon-simple-product-api',
license='Apache 2.0',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=True,
install_requires=["bottlenose", "lxml", "python-dateutil"],
)
|
af4ee10bbab0929719628560c2b6b13cb724c4cf
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.0',
author = 'Jason Dusek',
author_email = 'jason.dusek@gmail.com',
url = 'https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
|
#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.1',
author='Jason Dusek',
author_email='jason.dusek@gmail.com',
url='https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
|
Fix version and format and rereleas
|
Fix version and format and rereleas
|
Python
|
mit
|
drcloud/magiclog
|
#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.0',
author = 'Jason Dusek',
author_email = 'jason.dusek@gmail.com',
url = 'https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
Fix version and format and rereleas
|
#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.1',
author='Jason Dusek',
author_email='jason.dusek@gmail.com',
url='https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.0',
author = 'Jason Dusek',
author_email = 'jason.dusek@gmail.com',
url = 'https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
<commit_msg>Fix version and format and rereleas<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.1',
author='Jason Dusek',
author_email='jason.dusek@gmail.com',
url='https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
|
#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.0',
author = 'Jason Dusek',
author_email = 'jason.dusek@gmail.com',
url = 'https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
Fix version and format and rereleas#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.1',
author='Jason Dusek',
author_email='jason.dusek@gmail.com',
url='https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.0',
author = 'Jason Dusek',
author_email = 'jason.dusek@gmail.com',
url = 'https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
<commit_msg>Fix version and format and rereleas<commit_after>#!/usr/bin/env python
from setuptools import setup
conf = dict(name='magiclog',
version='1.0.1',
author='Jason Dusek',
author_email='jason.dusek@gmail.com',
url='https://github.com/drcloud/magiclog',
install_requires=[],
setup_requires=['pytest-runner', 'setuptools'],
tests_require=['flake8', 'pytest', 'tox'],
description='Easy logger management for libraries and CLI tools.',
py_modules=['magiclog'],
classifiers=['Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python',
'Topic :: Software Development',
'Development Status :: 4 - Beta'])
if __name__ == '__main__':
setup(**conf)
|
74ae1e2809b2c69db0617e1516f2efc30aba13bb
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving basic content from websites',
long_description=open('README.rst').read(),
long_description_content_type="text/x-rst",
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
|
#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving content from websites and being returned in a pretty format.',
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
|
Remove long desc for now.
|
Remove long desc for now.
|
Python
|
mit
|
michaelhelmick/lassie,michaelhelmick/lassie
|
#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving basic content from websites',
long_description=open('README.rst').read(),
long_description_content_type="text/x-rst",
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
Remove long desc for now.
|
#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving content from websites and being returned in a pretty format.',
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
|
<commit_before>#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving basic content from websites',
long_description=open('README.rst').read(),
long_description_content_type="text/x-rst",
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
<commit_msg>Remove long desc for now.<commit_after>
|
#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving content from websites and being returned in a pretty format.',
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
|
#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving basic content from websites',
long_description=open('README.rst').read(),
long_description_content_type="text/x-rst",
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
Remove long desc for now.#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving content from websites and being returned in a pretty format.',
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
|
<commit_before>#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving basic content from websites',
long_description=open('README.rst').read(),
long_description_content_type="text/x-rst",
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
<commit_msg>Remove long desc for now.<commit_after>#!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.11.10'
packages = [
'lassie',
'lassie.filters',
'lassie.filters.oembed'
]
setup(
name='lassie',
version=__version__,
install_requires=open("requirements.txt").read().split("\n"),
author='Mike Helmick',
license=open('LICENSE').read(),
url='https://github.com/michaelhelmick/lassie/tree/master',
keywords='lassie open graph web content scrape scraper',
description='Lassie is a Python library for retrieving content from websites and being returned in a pretty format.',
include_package_data=True,
packages=packages,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.8',
]
)
|
4c1a7c7d666b7cde77f26f7d373ed9792769592f
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4.1',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
|
Add support for missing .yang suffix check
|
Add support for missing .yang suffix check
|
Python
|
bsd-3-clause
|
xym-tool/xym
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
Add support for missing .yang suffix check
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4.1',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
|
<commit_before>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
<commit_msg>Add support for missing .yang suffix check<commit_after>
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4.1',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
Add support for missing .yang suffix checkimport os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4.1',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
|
<commit_before>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
<commit_msg>Add support for missing .yang suffix check<commit_after>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'xym',
version = '0.4.1',
description = ('A tool to fetch and extract YANG modules from IETF RFCs and Drafts'),
long_description = """xym is a simple tool for fetching and extracting YANG modules from IETF RFCs and drafts as local files and from URLs.""",
packages = ['xym'],
scripts = ['bin/xym'],
author = 'Jan Medved',
author_email = 'jmedved@cisco.com',
license = 'New-style BSD',
url = 'https://github.com/xym-tool/xym',
install_requires = ['requests>=2.6'],
include_package_data = True,
keywords = ['yang', 'extraction'],
classifiers = [],
)
|
553d6d1a03afcf47f57821e055e058a662d951ca
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.rst').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
|
#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.md').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
|
Use README.md instead of README.rst
|
Use README.md instead of README.rst
|
Python
|
bsd-3-clause
|
Holzhaus/python-cmuclmtk
|
#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.rst').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
Use README.md instead of README.rst
|
#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.md').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.rst').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
<commit_msg>Use README.md instead of README.rst<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.md').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
|
#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.rst').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
Use README.md instead of README.rst#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.md').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.rst').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
<commit_msg>Use README.md instead of README.rst<commit_after>#!/usr/bin/env python
# -*- coding: utf-8-*-
from setuptools import setup
setup(name='cmuclmtk',
version='0.1.3',
description='Wrapper library for accessing the language model tools for CMU Sphinx (CMUCLMTK).',
long_description=open('README.md').read(),
author='Jan Holthuis',
author_email='holthuis.jan@googlemail.com',
license='BSD',
url='https://github.com/Holzhaus/python-cmuclmtk',
packages=['cmuclmtk'],
keywords='cmu sphinx cmuclmtk language modeling training vocabulary dictionary vocab dict',
zip_safe=True
)
|
33337ee5bd50f73af07063fde5ee7d01874b9739
|
setup.py
|
setup.py
|
"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
Fix readme to be able to build a wheel
|
Fix readme to be able to build a wheel
|
Python
|
mit
|
fyndiq/django-gstorage
|
"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
Fix readme to be able to build a wheel
|
"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
<commit_before>"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
<commit_msg>Fix readme to be able to build a wheel<commit_after>
|
"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
Fix readme to be able to build a wheel"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
<commit_before>"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
<commit_msg>Fix readme to be able to build a wheel<commit_after>"""
django-gstorage
"""
import re
from setuptools import setup
version = ''
with open('gstorage/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
with open('HISTORY.rst', 'r', 'utf-8') as f:
history = f.read()
setup(
name='django-gstorage',
version=version,
description='Allow easy integration with Google storage for Django projects',
long_description=readme + '\n\n' + history,
author='Pradip Caulagi',
author_email='caulagi@gmail.com',
url='http://github.com/fyndiq/django-gstorage/',
packages=['gstorage'],
include_package_data=False,
install_requires=[
'Django',
'gcloud',
],
license='MIT',
zip_safe=False,
classifiers=[
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
81ea1101839059cbb57011f0d1af5b06ebe3d458
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
|
from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
install_requires=open('requirements.txt').read(),
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
|
Add dependency, so it gets auto installed when installing the plugin.
|
Add dependency, so it gets auto installed when installing the plugin.
|
Python
|
mit
|
a2csuga/lektor-root-relative-path
|
from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
Add dependency, so it gets auto installed when installing the plugin.
|
from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
install_requires=open('requirements.txt').read(),
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
|
<commit_before>from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
<commit_msg>Add dependency, so it gets auto installed when installing the plugin.<commit_after>
|
from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
install_requires=open('requirements.txt').read(),
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
|
from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
Add dependency, so it gets auto installed when installing the plugin.from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
install_requires=open('requirements.txt').read(),
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
|
<commit_before>from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
<commit_msg>Add dependency, so it gets auto installed when installing the plugin.<commit_after>from setuptools import setup
setup(
name='lektor-root-relative-path',
author=u'Atsushi Suga',
author_email='a2csuga@users.noreply.github.com',
version='0.1',
url='http://github.com/a2csuga/lektor-root-relative-path',
license='MIT',
install_requires=open('requirements.txt').read(),
packages=['lektor_root_relative_path'],
description='Root relative path plugin for Lektor',
entry_points={
'lektor.plugins': [
'root-relative-path = lektor_root_relative_path:RootRelativePathPlugin',
]
}
)
|
ece484b3beeae72543a3f3b139bc584200050d03
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
Change version and license for 0.2
|
Change version and license for 0.2
git-svn-id: 8e0fbe17d71f9a07a4f24b82f5b9fb44b438f95e@617 b426a367-1105-0410-b9ff-cdf4ab011145
|
Python
|
bsd-3-clause
|
monoid/owslib,monoid/owslib,sabman/OWSLib
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
Change version and license for 0.2
git-svn-id: 8e0fbe17d71f9a07a4f24b82f5b9fb44b438f95e@617 b426a367-1105-0410-b9ff-cdf4ab011145
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
<commit_before>
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
<commit_msg>Change version and license for 0.2
git-svn-id: 8e0fbe17d71f9a07a4f24b82f5b9fb44b438f95e@617 b426a367-1105-0410-b9ff-cdf4ab011145<commit_after>
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
Change version and license for 0.2
git-svn-id: 8e0fbe17d71f9a07a4f24b82f5b9fb44b438f95e@617 b426a367-1105-0410-b9ff-cdf4ab011145
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
<commit_before>
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
<commit_msg>Change version and license for 0.2
git-svn-id: 8e0fbe17d71f9a07a4f24b82f5b9fb44b438f95e@617 b426a367-1105-0410-b9ff-cdf4ab011145<commit_after>
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
69471b0b8bbc28e820c239a6e5bdef32f644d9e8
|
setup.py
|
setup.py
|
from setuptools import setup
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=['django_jobvite'],
)
|
from setuptools import setup, find_packages
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=find_packages(),
)
|
Use find_packages() to package all packages of module.
|
Use find_packages() to package all packages of module.
Current setup.py includes only the django_jobvite package and excludes all
the packages within the django_jobvite directory, like migrations,
management, etc. Long story short, if you install django_jobvite using
pip it doesn't work.
|
Python
|
bsd-3-clause
|
mozilla/django-jobvite
|
from setuptools import setup
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=['django_jobvite'],
)
Use find_packages() to package all packages of module.
Current setup.py includes only the django_jobvite package and excludes all
the packages within the django_jobvite directory, like migrations,
management, etc. Long story short, if you install django_jobvite using
pip it doesn't work.
|
from setuptools import setup, find_packages
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=find_packages(),
)
|
<commit_before>from setuptools import setup
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=['django_jobvite'],
)
<commit_msg>Use find_packages() to package all packages of module.
Current setup.py includes only the django_jobvite package and excludes all
the packages within the django_jobvite directory, like migrations,
management, etc. Long story short, if you install django_jobvite using
pip it doesn't work.<commit_after>
|
from setuptools import setup, find_packages
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=find_packages(),
)
|
from setuptools import setup
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=['django_jobvite'],
)
Use find_packages() to package all packages of module.
Current setup.py includes only the django_jobvite package and excludes all
the packages within the django_jobvite directory, like migrations,
management, etc. Long story short, if you install django_jobvite using
pip it doesn't work.from setuptools import setup, find_packages
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=find_packages(),
)
|
<commit_before>from setuptools import setup
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=['django_jobvite'],
)
<commit_msg>Use find_packages() to package all packages of module.
Current setup.py includes only the django_jobvite package and excludes all
the packages within the django_jobvite directory, like migrations,
management, etc. Long story short, if you install django_jobvite using
pip it doesn't work.<commit_after>from setuptools import setup, find_packages
import django_jobvite
setup(
name='django-jobvite',
version=django_jobvite.__version__,
description='Simpler, JSON based interface to Jobvite',
long_description=open('README.rst').read(),
author='Paul Osman',
author_email='paul@mozillafoundation.org',
url='http://github.com/mozilla/django-jobvite',
license='BSD',
packages=find_packages(),
)
|
74f07511d810447f8c357aadebb810f6cb67ef55
|
algoliasearch/__init__.py
|
algoliasearch/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
# Compatibility with old import
class algoliasearch(object):
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
from . import version
# Compatibility with old import
class algoliasearch(object):
VERSION = version.VERSION
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
|
Make VERSION easier to access
|
Make VERSION easier to access
Until now `algoliasearch.version.VERSION` was needed to obtain the
current version. Only `algoliasearch.VERSION` is now needed.
The change is backward compatible: it is still possible to do
`algoliasearch.version.VERSION`.
|
Python
|
mit
|
algolia/algoliasearch-client-python
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
# Compatibility with old import
class algoliasearch(object):
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
Make VERSION easier to access
Until now `algoliasearch.version.VERSION` was needed to obtain the
current version. Only `algoliasearch.VERSION` is now needed.
The change is backward compatible: it is still possible to do
`algoliasearch.version.VERSION`.
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
from . import version
# Compatibility with old import
class algoliasearch(object):
VERSION = version.VERSION
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
# Compatibility with old import
class algoliasearch(object):
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
<commit_msg>Make VERSION easier to access
Until now `algoliasearch.version.VERSION` was needed to obtain the
current version. Only `algoliasearch.VERSION` is now needed.
The change is backward compatible: it is still possible to do
`algoliasearch.version.VERSION`.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
from . import version
# Compatibility with old import
class algoliasearch(object):
VERSION = version.VERSION
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
|
# -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
# Compatibility with old import
class algoliasearch(object):
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
Make VERSION easier to access
Until now `algoliasearch.version.VERSION` was needed to obtain the
current version. Only `algoliasearch.VERSION` is now needed.
The change is backward compatible: it is still possible to do
`algoliasearch.version.VERSION`.# -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
from . import version
# Compatibility with old import
class algoliasearch(object):
VERSION = version.VERSION
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
# Compatibility with old import
class algoliasearch(object):
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
<commit_msg>Make VERSION easier to access
Until now `algoliasearch.version.VERSION` was needed to obtain the
current version. Only `algoliasearch.VERSION` is now needed.
The change is backward compatible: it is still possible to do
`algoliasearch.version.VERSION`.<commit_after># -*- coding: utf-8 -*-
"""
Copyright (c) 2013 Algolia
http://www.algolia.com/
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from . import client
from . import index
from . import helpers
from . import version
# Compatibility with old import
class algoliasearch(object):
VERSION = version.VERSION
Client = client.Client
Index = index.Index
AlgoliaException = helpers.AlgoliaException
|
542daac6533b65f6a92e86987b913b5981d1638d
|
setup.py
|
setup.py
|
"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
from setuptools import setup
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
|
"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
import os
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as f:
README = f.read()
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
long_description=README,
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
|
Include README in PyPi release
|
Include README in PyPi release
|
Python
|
bsd-3-clause
|
akaihola/lusmu
|
"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
from setuptools import setup
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
Include README in PyPi release
|
"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
import os
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as f:
README = f.read()
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
long_description=README,
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
|
<commit_before>"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
from setuptools import setup
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
<commit_msg>Include README in PyPi release<commit_after>
|
"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
import os
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as f:
README = f.read()
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
long_description=README,
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
|
"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
from setuptools import setup
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
Include README in PyPi release"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
import os
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as f:
README = f.read()
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
long_description=README,
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
|
<commit_before>"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
from setuptools import setup
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
<commit_msg>Include README in PyPi release<commit_after>"""Lusmu setup information
Copyright 2013 Eniram Ltd. See the LICENSE file at the top-level directory of
this distribution and at https://github.com/akaihola/lusmu/blob/master/LICENSE
"""
import os
from setuptools import setup
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as f:
README = f.read()
setup(name='lusmu',
version='0.2.4.dev',
packages=['lusmu'],
author='Antti Kaihola',
author_email='antti.kaihola@eniram.fi',
license='BSD',
description='A dataflow/reactive programming library for Python',
long_description=README,
keywords='eniram dataflow reactive',
url='https://github.com/akaihola/lusmu',
test_suite='nose.collector',
tests_require=['mock==1.0.1', 'nose==1.3.0'])
|
35c023f78c2d2c735cba9f6acf504d62d5ac5c83
|
setup.py
|
setup.py
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.1',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.2',
packages=find_packages(),
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
Include custom templatetags in package build.
|
Include custom templatetags in package build.
|
Python
|
mit
|
skolsuper/pybbm_private_messages,skolsuper/pybbm_private_messages,skolsuper/pybbm_private_messages
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.1',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Include custom templatetags in package build.
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.2',
packages=find_packages(),
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
<commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.1',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Include custom templatetags in package build.<commit_after>
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.2',
packages=find_packages(),
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.1',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Include custom templatetags in package build.import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.2',
packages=find_packages(),
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
<commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.1',
packages=['private_messages', 'private_messages.migrations'],
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Include custom templatetags in package build.<commit_after>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='pybbm-private-messages',
version='0.2.2',
packages=find_packages(),
include_package_data=True,
install_requires=[
'pybbm',
],
test_suite='runtests.runtests',
license='MIT License',
description='A private messaging plugin for the pybbm forum.',
long_description=README,
url='https://github.com/skolsuper/pybbm_private_messages',
author='James Keys',
author_email='skolsuper@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
e09d2a22bd91b114d291f05131ed7a487370e438
|
short.py
|
short.py
|
def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
|
def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
|
Move data structures out of main
|
Move data structures out of main
|
Python
|
mit
|
pshc/archipelago,pshc/archipelago,pshc/archipelago
|
def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
Move data structures out of main
|
def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
|
<commit_before>def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
<commit_msg>Move data structures out of main<commit_after>
|
def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
|
def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
Move data structures out of maindef ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
|
<commit_before>def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
<commit_msg>Move data structures out of main<commit_after>def ok(n):
s = n + 3
print 's = %d' % (s,)
return s
def ident(val):
return val
Pair = DT('Pair', ('first', int), ('second', int))
Maybe, Just, Nothing = ADT('Maybe', 'Just', ('just', 'a'), 'Nothing')
def main():
while False:
inside = 1
a = 1 + 2
ok(a)
if True:
print 'ok'
elif False:
print 'what'
else:
print 'no'
assert True, 'WHAT'
t = (1, 2)
c = None
c = 'ok'
m = Just(1)
match(m, ("Just(n)", identity), ("Nothing()", lambda: 0))
return 0
# vi: set sw=4 ts=4 sts=4 tw=79 ai et nocindent:
|
d0bcfebd2f85ec0ba17812ad4e98ef738dae1163
|
menpo/shape/groupops.py
|
menpo/shape/groupops.py
|
from .pointcloud import PointCloud
import numpy as np
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(np.mean([pc.points for pc in pointclouds], axis=0))
|
from __future__ import division
from .pointcloud import PointCloud
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(sum(pc.points for pc in pointclouds) / len(pointclouds))
|
Update mean_pointcloud to be faster
|
Update mean_pointcloud to be faster
This is actually faster than using numpy. It is also MUCH
faster if it gets jitted by something like pypy or numba.
|
Python
|
bsd-3-clause
|
mozata/menpo,mozata/menpo,patricksnape/menpo,menpo/menpo,grigorisg9gr/menpo,mozata/menpo,yuxiang-zhou/menpo,menpo/menpo,mozata/menpo,grigorisg9gr/menpo,yuxiang-zhou/menpo,grigorisg9gr/menpo,patricksnape/menpo,menpo/menpo,patricksnape/menpo,yuxiang-zhou/menpo
|
from .pointcloud import PointCloud
import numpy as np
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(np.mean([pc.points for pc in pointclouds], axis=0))
Update mean_pointcloud to be faster
This is actually faster than using numpy. It is also MUCH
faster if it gets jitted by something like pypy or numba.
|
from __future__ import division
from .pointcloud import PointCloud
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(sum(pc.points for pc in pointclouds) / len(pointclouds))
|
<commit_before>from .pointcloud import PointCloud
import numpy as np
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(np.mean([pc.points for pc in pointclouds], axis=0))
<commit_msg>Update mean_pointcloud to be faster
This is actually faster than using numpy. It is also MUCH
faster if it gets jitted by something like pypy or numba.<commit_after>
|
from __future__ import division
from .pointcloud import PointCloud
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(sum(pc.points for pc in pointclouds) / len(pointclouds))
|
from .pointcloud import PointCloud
import numpy as np
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(np.mean([pc.points for pc in pointclouds], axis=0))
Update mean_pointcloud to be faster
This is actually faster than using numpy. It is also MUCH
faster if it gets jitted by something like pypy or numba.from __future__ import division
from .pointcloud import PointCloud
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(sum(pc.points for pc in pointclouds) / len(pointclouds))
|
<commit_before>from .pointcloud import PointCloud
import numpy as np
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(np.mean([pc.points for pc in pointclouds], axis=0))
<commit_msg>Update mean_pointcloud to be faster
This is actually faster than using numpy. It is also MUCH
faster if it gets jitted by something like pypy or numba.<commit_after>from __future__ import division
from .pointcloud import PointCloud
def mean_pointcloud(pointclouds):
r"""
Compute the mean of a `list` of :map:`PointCloud` objects.
Parameters
----------
pointclouds: `list` of :map:`PointCloud`
List of point cloud objects from which we want to compute the mean.
Returns
-------
mean_pointcloud : :map:`PointCloud`
The mean point cloud.
"""
return PointCloud(sum(pc.points for pc in pointclouds) / len(pointclouds))
|
536b99916140f01216e2f9482086a83438250977
|
web/mailman-web/urls.py
|
web/mailman-web/urls.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', include(admin.site.urls)),
]
|
# -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', admin.site.urls),
]
|
Fix the admin url configuration.
|
Fix the admin url configuration.
|
Python
|
mit
|
maxking/docker-mailman,maxking/docker-mailman
|
# -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', include(admin.site.urls)),
]
Fix the admin url configuration.
|
# -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', admin.site.urls),
]
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', include(admin.site.urls)),
]
<commit_msg>Fix the admin url configuration.<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', admin.site.urls),
]
|
# -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', include(admin.site.urls)),
]
Fix the admin url configuration.# -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', admin.site.urls),
]
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', include(admin.site.urls)),
]
<commit_msg>Fix the admin url configuration.<commit_after># -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Postorius.
#
# Postorius is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Postorius is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# Postorius. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import include, url
from django.contrib import admin
from django.urls import reverse_lazy
from django.views.generic import RedirectView
urlpatterns = [
url(r'^$', RedirectView.as_view(
url=reverse_lazy('list_index'),
permanent=True)),
url(r'^postorius/', include('postorius.urls')),
url(r'^hyperkitty/', include('hyperkitty.urls')),
url(r'', include('django_mailman3.urls')),
url(r'^accounts/', include('allauth.urls')),
# Django admin
url(r'^admin/', admin.site.urls),
]
|
8ea6176719cd0c167420e3a7332efc7ece947a0d
|
genderjobcheck/views.py
|
genderjobcheck/views.py
|
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
results = assess.assess(ad_text)
return render(request, 'results.html', results
)
|
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.shortcuts import redirect
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
if len(ad_text):
results = assess.assess(ad_text)
return render(request, 'results.html', results
)
else:
return redirect('/')
|
Handle form submits without ad text
|
Handle form submits without ad text
|
Python
|
mit
|
lovedaybrooke/gender-decoder,lovedaybrooke/gender-decoder
|
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
results = assess.assess(ad_text)
return render(request, 'results.html', results
)Handle form submits without ad text
|
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.shortcuts import redirect
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
if len(ad_text):
results = assess.assess(ad_text)
return render(request, 'results.html', results
)
else:
return redirect('/')
|
<commit_before>from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
results = assess.assess(ad_text)
return render(request, 'results.html', results
)<commit_msg>Handle form submits without ad text<commit_after>
|
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.shortcuts import redirect
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
if len(ad_text):
results = assess.assess(ad_text)
return render(request, 'results.html', results
)
else:
return redirect('/')
|
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
results = assess.assess(ad_text)
return render(request, 'results.html', results
)Handle form submits without ad textfrom django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.shortcuts import redirect
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
if len(ad_text):
results = assess.assess(ad_text)
return render(request, 'results.html', results
)
else:
return redirect('/')
|
<commit_before>from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
results = assess.assess(ad_text)
return render(request, 'results.html', results
)<commit_msg>Handle form submits without ad text<commit_after>from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render
from django.shortcuts import redirect
import assess
def home(request):
if request.method == 'GET':
return render(request, 'home.html', {})
@csrf_exempt
def assessJobAd(request):
if request.method == 'POST':
ad_text = request.POST["adtext"]
if len(ad_text):
results = assess.assess(ad_text)
return render(request, 'results.html', results
)
else:
return redirect('/')
|
d666c5c818fbfc00f642cfeb24cb90aab94035cd
|
keyring/devpi_client.py
|
keyring/devpi_client.py
|
import contextlib
import functools
import pluggy
import keyring
from keyring.errors import KeyringError
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
|
import contextlib
import functools
import pluggy
import keyring.errors
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(keyring.errors.KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
|
Remove superfluous import by using the exception from the namespace.
|
Remove superfluous import by using the exception from the namespace.
|
Python
|
mit
|
jaraco/keyring
|
import contextlib
import functools
import pluggy
import keyring
from keyring.errors import KeyringError
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
Remove superfluous import by using the exception from the namespace.
|
import contextlib
import functools
import pluggy
import keyring.errors
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(keyring.errors.KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
|
<commit_before>import contextlib
import functools
import pluggy
import keyring
from keyring.errors import KeyringError
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
<commit_msg>Remove superfluous import by using the exception from the namespace.<commit_after>
|
import contextlib
import functools
import pluggy
import keyring.errors
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(keyring.errors.KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
|
import contextlib
import functools
import pluggy
import keyring
from keyring.errors import KeyringError
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
Remove superfluous import by using the exception from the namespace.import contextlib
import functools
import pluggy
import keyring.errors
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(keyring.errors.KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
|
<commit_before>import contextlib
import functools
import pluggy
import keyring
from keyring.errors import KeyringError
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
<commit_msg>Remove superfluous import by using the exception from the namespace.<commit_after>import contextlib
import functools
import pluggy
import keyring.errors
hookimpl = pluggy.HookimplMarker("devpiclient")
# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})
def restore_signature(func):
# workaround for pytest-dev/pluggy#358
@functools.wraps(func)
def wrapper(url, username):
return func(url, username)
return wrapper
@hookimpl()
@restore_signature
@suppress(keyring.errors.KeyringError)
def devpiclient_get_password(url, username):
"""
>>> pluggy._hooks.varnames(devpiclient_get_password)
(('url', 'username'), ())
>>>
"""
return keyring.get_password(url, username)
|
142de9e809a9bc82bca6d12eaf492c1ce12a618d
|
geotrek/authent/migrations/0002_auto_20181107_1620.py
|
geotrek/authent/migrations/0002_auto_20181107_1620.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.apps
from django.core.management import call_command
def add_permissions():
call_command('update_geotrek_permissions', verbosity=0)
UserModel = django.apps.apps.get_model('auth', 'User')
PermissionModel = django.apps.apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def add_permissions(apps, schema_editor):
call_command('update_geotrek_permissions', verbosity=0)
UserModel = apps.get_model('auth', 'User')
PermissionModel = apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
|
Change migrations lack of apps, schema_editor
|
Change migrations lack of apps, schema_editor
|
Python
|
bsd-2-clause
|
GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.apps
from django.core.management import call_command
def add_permissions():
call_command('update_geotrek_permissions', verbosity=0)
UserModel = django.apps.apps.get_model('auth', 'User')
PermissionModel = django.apps.apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
Change migrations lack of apps, schema_editor
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def add_permissions(apps, schema_editor):
call_command('update_geotrek_permissions', verbosity=0)
UserModel = apps.get_model('auth', 'User')
PermissionModel = apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.apps
from django.core.management import call_command
def add_permissions():
call_command('update_geotrek_permissions', verbosity=0)
UserModel = django.apps.apps.get_model('auth', 'User')
PermissionModel = django.apps.apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
<commit_msg>Change migrations lack of apps, schema_editor<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def add_permissions(apps, schema_editor):
call_command('update_geotrek_permissions', verbosity=0)
UserModel = apps.get_model('auth', 'User')
PermissionModel = apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.apps
from django.core.management import call_command
def add_permissions():
call_command('update_geotrek_permissions', verbosity=0)
UserModel = django.apps.apps.get_model('auth', 'User')
PermissionModel = django.apps.apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
Change migrations lack of apps, schema_editor# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def add_permissions(apps, schema_editor):
call_command('update_geotrek_permissions', verbosity=0)
UserModel = apps.get_model('auth', 'User')
PermissionModel = apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.apps
from django.core.management import call_command
def add_permissions():
call_command('update_geotrek_permissions', verbosity=0)
UserModel = django.apps.apps.get_model('auth', 'User')
PermissionModel = django.apps.apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
<commit_msg>Change migrations lack of apps, schema_editor<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.core.management import call_command
def add_permissions(apps, schema_editor):
call_command('update_geotrek_permissions', verbosity=0)
UserModel = apps.get_model('auth', 'User')
PermissionModel = apps.get_model('auth', 'Permission')
permissions = ['path', 'service', 'poi', 'touristicevent', 'touristiccontent', 'project',
'trek', 'intervention', 'signage', 'workmanagementedge',
'signagemanagementedge', 'physicaledge', 'competenceedge', 'infrastructure',
'report', 'trail', 'path'
]
for user in UserModel.objects.all():
for perm in permissions:
user.user_permissions.add(PermissionModel.objects.get(
codename='change_geom_%s' % perm))
class Migration(migrations.Migration):
dependencies = [
('authent', '0001_initial'),
]
operations = [
migrations.RunPython(add_permissions)
]
|
1d4aea091883ad464d1c7fcdf734b1916337b25e
|
zeus/utils/revisions.py
|
zeus/utils/revisions.py
|
from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
|
from zeus.config import redis
from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
lock_key = "sync_repo:{repo_id}".format(repo_id=repository.id)
# lock this update to avoild piling up duplicate fetch/save calls
with redis.lock(lock_key):
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
|
Add lock on identify_revision when revision is missing
|
ref: Add lock on identify_revision when revision is missing
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
ref: Add lock on identify_revision when revision is missing
|
from zeus.config import redis
from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
lock_key = "sync_repo:{repo_id}".format(repo_id=repository.id)
# lock this update to avoild piling up duplicate fetch/save calls
with redis.lock(lock_key):
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
|
<commit_before>from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
<commit_msg>ref: Add lock on identify_revision when revision is missing<commit_after>
|
from zeus.config import redis
from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
lock_key = "sync_repo:{repo_id}".format(repo_id=repository.id)
# lock this update to avoild piling up duplicate fetch/save calls
with redis.lock(lock_key):
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
|
from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
ref: Add lock on identify_revision when revision is missingfrom zeus.config import redis
from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
lock_key = "sync_repo:{repo_id}".format(repo_id=repository.id)
# lock this update to avoild piling up duplicate fetch/save calls
with redis.lock(lock_key):
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
|
<commit_before>from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
<commit_msg>ref: Add lock on identify_revision when revision is missing<commit_after>from zeus.config import redis
from zeus.exceptions import UnknownRepositoryBackend
from zeus.models import Repository, Revision
from zeus.vcs.base import UnknownRevision
def identify_revision(repository: Repository, treeish: str):
"""
Attempt to transform a a commit-like reference into a valid revision.
"""
# try to find it from the database first
if len(treeish) == 40:
revision = Revision.query.filter(
Revision.repository_id == repository.id, Revision.sha == treeish
).first()
if revision:
return revision
try:
vcs = repository.get_vcs()
except UnknownRepositoryBackend:
return None
vcs.ensure(update_if_exists=False)
lock_key = "sync_repo:{repo_id}".format(repo_id=repository.id)
# lock this update to avoild piling up duplicate fetch/save calls
with redis.lock(lock_key):
try:
commit = next(vcs.log(parent=treeish, limit=1))
except UnknownRevision:
vcs.update()
commit = next(vcs.log(parent=treeish, limit=1))
revision, _ = commit.save(repository)
return revision
|
d5a3285b05d96ffc99049867256cdba87a5b420a
|
packages/mono_crypto.py
|
packages/mono_crypto.py
|
from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if "pr/" not in self.git_branch:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
else:
self.sh('%{git} checkout origin/master')
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()
|
from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if 'pull/' in self.git_branch: # pull request
self.sh('%{git} checkout origin/master')
else:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()
|
Fix mono-extensions checkout for PR branches ('origin/pull/N/merge')
|
Fix mono-extensions checkout for PR branches ('origin/pull/N/merge')
|
Python
|
mit
|
mono/bockbuild,mono/bockbuild
|
from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if "pr/" not in self.git_branch:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
else:
self.sh('%{git} checkout origin/master')
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()Fix mono-extensions checkout for PR branches ('origin/pull/N/merge')
|
from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if 'pull/' in self.git_branch: # pull request
self.sh('%{git} checkout origin/master')
else:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()
|
<commit_before>from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if "pr/" not in self.git_branch:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
else:
self.sh('%{git} checkout origin/master')
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()<commit_msg>Fix mono-extensions checkout for PR branches ('origin/pull/N/merge')<commit_after>
|
from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if 'pull/' in self.git_branch: # pull request
self.sh('%{git} checkout origin/master')
else:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()
|
from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if "pr/" not in self.git_branch:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
else:
self.sh('%{git} checkout origin/master')
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()Fix mono-extensions checkout for PR branches ('origin/pull/N/merge')from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if 'pull/' in self.git_branch: # pull request
self.sh('%{git} checkout origin/master')
else:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()
|
<commit_before>from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if "pr/" not in self.git_branch:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
else:
self.sh('%{git} checkout origin/master')
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()<commit_msg>Fix mono-extensions checkout for PR branches ('origin/pull/N/merge')<commit_after>from mono_master import MonoMasterPackage
from bockbuild.util.util import *
class MonoMasterEncryptedPackage (MonoMasterPackage):
def __init__(self):
MonoMasterPackage.__init__ (self)
self.configure_flags.extend(['--enable-extension-module=crypto --enable-native-types'])
def prep(self):
MonoMasterPackage.prep(self)
retry (self.checkout_mono_extensions)
def checkout_mono_extensions(self):
ext = 'git@github.com:xamarin/mono-extensions.git'
dirname = os.path.join(self.profile.build_root, "mono-extensions")
if not os.path.exists(dirname):
self.sh('%' + '{git} clone --local --shared "%s" "%s"' % (ext, dirname))
self.pushd(dirname)
try:
self.sh('%{git} clean -xffd')
self.sh('%{git} fetch --all --prune')
if 'pull/' in self.git_branch: # pull request
self.sh('%{git} checkout origin/master')
else:
self.sh('%' + '{git} checkout origin/%s' % self.git_branch)
self.sh ('%{git} reset --hard')
except Exception as e:
self.popd ()
self.rm_if_exists (dirname)
raise
finally:
info ('Mono crypto extensions (rev. %s)' % git_get_revision (self))
self.popd ()
MonoMasterEncryptedPackage()
|
75bc100fb49588c057a6049975ce7c5803aa9145
|
zvm/zcpu.py
|
zvm/zcpu.py
|
#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
def __init__(self, zmem):
""
self._memory = zmem
self._opcode_handlers = {}
# Introspect ourselves, discover all functions that look like
# opcode handlers, and add them to our mapper
for func in self.__class__.__dict__:
print "Considering %s" % func
instance_func = getattr(self, func)
if instance_func != None:
doc_head = instance_func.__doc__.split('\n')[0]
print "Potential candidate, docstring is %s" % doc_head
if doc_head.startswith("ZOPCODE "):
opcode_num = int(doc_head[8:], 16)
self._opcode_handlers[opcode_num] = instance_func
print self._opcode_handlers
def test_opcode(self, zop):
"""ZOPCODE 0x20
This is a test opcode."""
|
#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
_opcodes = {}
def __init__(self, zmem):
self._memory = zmem
print self._opcodes
def _get_handler(self, opcode):
return getattr(self, _opcodes[opcode])
def test_opcode(self, zop):
"""This is a test opcode."""
test_opcode._opcode = 0x20
# This is the "automagic" opcode handler registration system.
# After each function that is an opcode handler, we assign the
# function object an _opcode attribute, giving the numeric opcode
# the function implements.
#
# Then, the following code iterates back over all items in the
# class, and registers all objects with that attribute in the
# _opcodes dictionary.
#
# Then, at runtime, the _get_handler method can be invoked to
# retrieve the function implementing a given opcode. Pretty cool
# voodoo if you ask me.
for k,v in vars().items():
if hasattr(v, "_opcode"):
_opcodes[v._opcode] = k
|
Make the CPU use lovely decorator syntax for registering opcode implementations.
|
Make the CPU use lovely decorator syntax for registering opcode implementations.
|
Python
|
bsd-3-clause
|
BGCX262/zvm-hg-to-git,BGCX262/zvm-hg-to-git
|
#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
def __init__(self, zmem):
""
self._memory = zmem
self._opcode_handlers = {}
# Introspect ourselves, discover all functions that look like
# opcode handlers, and add them to our mapper
for func in self.__class__.__dict__:
print "Considering %s" % func
instance_func = getattr(self, func)
if instance_func != None:
doc_head = instance_func.__doc__.split('\n')[0]
print "Potential candidate, docstring is %s" % doc_head
if doc_head.startswith("ZOPCODE "):
opcode_num = int(doc_head[8:], 16)
self._opcode_handlers[opcode_num] = instance_func
print self._opcode_handlers
def test_opcode(self, zop):
"""ZOPCODE 0x20
This is a test opcode."""
Make the CPU use lovely decorator syntax for registering opcode implementations.
|
#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
_opcodes = {}
def __init__(self, zmem):
self._memory = zmem
print self._opcodes
def _get_handler(self, opcode):
return getattr(self, _opcodes[opcode])
def test_opcode(self, zop):
"""This is a test opcode."""
test_opcode._opcode = 0x20
# This is the "automagic" opcode handler registration system.
# After each function that is an opcode handler, we assign the
# function object an _opcode attribute, giving the numeric opcode
# the function implements.
#
# Then, the following code iterates back over all items in the
# class, and registers all objects with that attribute in the
# _opcodes dictionary.
#
# Then, at runtime, the _get_handler method can be invoked to
# retrieve the function implementing a given opcode. Pretty cool
# voodoo if you ask me.
for k,v in vars().items():
if hasattr(v, "_opcode"):
_opcodes[v._opcode] = k
|
<commit_before>#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
def __init__(self, zmem):
""
self._memory = zmem
self._opcode_handlers = {}
# Introspect ourselves, discover all functions that look like
# opcode handlers, and add them to our mapper
for func in self.__class__.__dict__:
print "Considering %s" % func
instance_func = getattr(self, func)
if instance_func != None:
doc_head = instance_func.__doc__.split('\n')[0]
print "Potential candidate, docstring is %s" % doc_head
if doc_head.startswith("ZOPCODE "):
opcode_num = int(doc_head[8:], 16)
self._opcode_handlers[opcode_num] = instance_func
print self._opcode_handlers
def test_opcode(self, zop):
"""ZOPCODE 0x20
This is a test opcode."""
<commit_msg>Make the CPU use lovely decorator syntax for registering opcode implementations.<commit_after>
|
#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
_opcodes = {}
def __init__(self, zmem):
self._memory = zmem
print self._opcodes
def _get_handler(self, opcode):
return getattr(self, _opcodes[opcode])
def test_opcode(self, zop):
"""This is a test opcode."""
test_opcode._opcode = 0x20
# This is the "automagic" opcode handler registration system.
# After each function that is an opcode handler, we assign the
# function object an _opcode attribute, giving the numeric opcode
# the function implements.
#
# Then, the following code iterates back over all items in the
# class, and registers all objects with that attribute in the
# _opcodes dictionary.
#
# Then, at runtime, the _get_handler method can be invoked to
# retrieve the function implementing a given opcode. Pretty cool
# voodoo if you ask me.
for k,v in vars().items():
if hasattr(v, "_opcode"):
_opcodes[v._opcode] = k
|
#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
def __init__(self, zmem):
""
self._memory = zmem
self._opcode_handlers = {}
# Introspect ourselves, discover all functions that look like
# opcode handlers, and add them to our mapper
for func in self.__class__.__dict__:
print "Considering %s" % func
instance_func = getattr(self, func)
if instance_func != None:
doc_head = instance_func.__doc__.split('\n')[0]
print "Potential candidate, docstring is %s" % doc_head
if doc_head.startswith("ZOPCODE "):
opcode_num = int(doc_head[8:], 16)
self._opcode_handlers[opcode_num] = instance_func
print self._opcode_handlers
def test_opcode(self, zop):
"""ZOPCODE 0x20
This is a test opcode."""
Make the CPU use lovely decorator syntax for registering opcode implementations.#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
_opcodes = {}
def __init__(self, zmem):
self._memory = zmem
print self._opcodes
def _get_handler(self, opcode):
return getattr(self, _opcodes[opcode])
def test_opcode(self, zop):
"""This is a test opcode."""
test_opcode._opcode = 0x20
# This is the "automagic" opcode handler registration system.
# After each function that is an opcode handler, we assign the
# function object an _opcode attribute, giving the numeric opcode
# the function implements.
#
# Then, the following code iterates back over all items in the
# class, and registers all objects with that attribute in the
# _opcodes dictionary.
#
# Then, at runtime, the _get_handler method can be invoked to
# retrieve the function implementing a given opcode. Pretty cool
# voodoo if you ask me.
for k,v in vars().items():
if hasattr(v, "_opcode"):
_opcodes[v._opcode] = k
|
<commit_before>#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
def __init__(self, zmem):
""
self._memory = zmem
self._opcode_handlers = {}
# Introspect ourselves, discover all functions that look like
# opcode handlers, and add them to our mapper
for func in self.__class__.__dict__:
print "Considering %s" % func
instance_func = getattr(self, func)
if instance_func != None:
doc_head = instance_func.__doc__.split('\n')[0]
print "Potential candidate, docstring is %s" % doc_head
if doc_head.startswith("ZOPCODE "):
opcode_num = int(doc_head[8:], 16)
self._opcode_handlers[opcode_num] = instance_func
print self._opcode_handlers
def test_opcode(self, zop):
"""ZOPCODE 0x20
This is a test opcode."""
<commit_msg>Make the CPU use lovely decorator syntax for registering opcode implementations.<commit_after>#
# A class which represents the CPU itself, the brain of the virtual
# machine. It ties all the systems together and runs the story.
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
class ZCpuError(Exception):
"General exception for Zcpu class"
pass
class ZCpu(object):
_opcodes = {}
def __init__(self, zmem):
self._memory = zmem
print self._opcodes
def _get_handler(self, opcode):
return getattr(self, _opcodes[opcode])
def test_opcode(self, zop):
"""This is a test opcode."""
test_opcode._opcode = 0x20
# This is the "automagic" opcode handler registration system.
# After each function that is an opcode handler, we assign the
# function object an _opcode attribute, giving the numeric opcode
# the function implements.
#
# Then, the following code iterates back over all items in the
# class, and registers all objects with that attribute in the
# _opcodes dictionary.
#
# Then, at runtime, the _get_handler method can be invoked to
# retrieve the function implementing a given opcode. Pretty cool
# voodoo if you ask me.
for k,v in vars().items():
if hasattr(v, "_opcode"):
_opcodes[v._opcode] = k
|
80a4c503675026c1274e2f1c20de6e3363cfb0f6
|
utils.py
|
utils.py
|
import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return time.mktime(dt.timetuple())
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60) % 60
|
import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return int(time.mktime(dt.timetuple()))
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60)
|
Make datetime_to_sec_since_epoch return an int and fix epoch_sec_to_minutes_since_epoch
|
Make datetime_to_sec_since_epoch return an int and fix epoch_sec_to_minutes_since_epoch
|
Python
|
bsd-2-clause
|
tofu702/varz_python_client
|
import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return time.mktime(dt.timetuple())
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60) % 60
Make datetime_to_sec_since_epoch return an int and fix epoch_sec_to_minutes_since_epoch
|
import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return int(time.mktime(dt.timetuple()))
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60)
|
<commit_before>import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return time.mktime(dt.timetuple())
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60) % 60
<commit_msg>Make datetime_to_sec_since_epoch return an int and fix epoch_sec_to_minutes_since_epoch<commit_after>
|
import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return int(time.mktime(dt.timetuple()))
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60)
|
import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return time.mktime(dt.timetuple())
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60) % 60
Make datetime_to_sec_since_epoch return an int and fix epoch_sec_to_minutes_since_epochimport datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return int(time.mktime(dt.timetuple()))
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60)
|
<commit_before>import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return time.mktime(dt.timetuple())
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60) % 60
<commit_msg>Make datetime_to_sec_since_epoch return an int and fix epoch_sec_to_minutes_since_epoch<commit_after>import datetime
import time
def datetime_to_sec_since_epoch(dt):
'''Take a python datetime object and convert it to seconds since the unix epoch. Not timezone
aware'''
return int(time.mktime(dt.timetuple()))
def sec_since_epoch_to_datetime(sec_since_epoch):
'''Take some number seconds since the epoch and convert them to a python datetime object. Not
timezone aware'''
return datetime.datetime.fromtimestamp(sec_since_epoch)
def epoch_sec_to_minutes_since_epoch(sec_since_epoch):
'''Return the number of minutes since the epoch (quite a large number)'''
return (sec_since_epoch / 60)
|
33fcaf9b7a54dfb3cf065455eba75ee74fbb313b
|
pep8speaks/constants.py
|
pep8speaks/constants.py
|
import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("BOT_PASSWORD", ""))
BASE_URL = 'https://api.github.com'
|
import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("GITHUB_TOKEN", ""))
BASE_URL = 'https://api.github.com'
|
Use GITHUB_TOKEN instead of BOT_PASSWORD
|
Use GITHUB_TOKEN instead of BOT_PASSWORD
When using BOT_PASSWORD, the bot cannot have
two factor authentication enabled as GitHub expects
an additional header parameter that is the one time
2FA password: https://developer.github.com/v3/auth/#working-with-two-factor-authentication
GITHUB_TOKEN can be used in place of BOT_PASSWORD
in order to correct this. https://developer.github.com/v3/auth/#via-oauth-tokens
Tested on a private organization with 2FA enabled.
|
Python
|
mit
|
OrkoHunter/pep8speaks
|
import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("BOT_PASSWORD", ""))
BASE_URL = 'https://api.github.com'
Use GITHUB_TOKEN instead of BOT_PASSWORD
When using BOT_PASSWORD, the bot cannot have
two factor authentication enabled as GitHub expects
an additional header parameter that is the one time
2FA password: https://developer.github.com/v3/auth/#working-with-two-factor-authentication
GITHUB_TOKEN can be used in place of BOT_PASSWORD
in order to correct this. https://developer.github.com/v3/auth/#via-oauth-tokens
Tested on a private organization with 2FA enabled.
|
import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("GITHUB_TOKEN", ""))
BASE_URL = 'https://api.github.com'
|
<commit_before>import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("BOT_PASSWORD", ""))
BASE_URL = 'https://api.github.com'
<commit_msg>Use GITHUB_TOKEN instead of BOT_PASSWORD
When using BOT_PASSWORD, the bot cannot have
two factor authentication enabled as GitHub expects
an additional header parameter that is the one time
2FA password: https://developer.github.com/v3/auth/#working-with-two-factor-authentication
GITHUB_TOKEN can be used in place of BOT_PASSWORD
in order to correct this. https://developer.github.com/v3/auth/#via-oauth-tokens
Tested on a private organization with 2FA enabled.<commit_after>
|
import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("GITHUB_TOKEN", ""))
BASE_URL = 'https://api.github.com'
|
import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("BOT_PASSWORD", ""))
BASE_URL = 'https://api.github.com'
Use GITHUB_TOKEN instead of BOT_PASSWORD
When using BOT_PASSWORD, the bot cannot have
two factor authentication enabled as GitHub expects
an additional header parameter that is the one time
2FA password: https://developer.github.com/v3/auth/#working-with-two-factor-authentication
GITHUB_TOKEN can be used in place of BOT_PASSWORD
in order to correct this. https://developer.github.com/v3/auth/#via-oauth-tokens
Tested on a private organization with 2FA enabled.import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("GITHUB_TOKEN", ""))
BASE_URL = 'https://api.github.com'
|
<commit_before>import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("BOT_PASSWORD", ""))
BASE_URL = 'https://api.github.com'
<commit_msg>Use GITHUB_TOKEN instead of BOT_PASSWORD
When using BOT_PASSWORD, the bot cannot have
two factor authentication enabled as GitHub expects
an additional header parameter that is the one time
2FA password: https://developer.github.com/v3/auth/#working-with-two-factor-authentication
GITHUB_TOKEN can be used in place of BOT_PASSWORD
in order to correct this. https://developer.github.com/v3/auth/#via-oauth-tokens
Tested on a private organization with 2FA enabled.<commit_after>import os
# HEADERS is deprecated, use AUTH only
HEADERS = {"Authorization": "token " + os.environ.setdefault("GITHUB_TOKEN", "")}
AUTH = (os.environ.setdefault("BOT_USERNAME", ""), os.environ.setdefault("GITHUB_TOKEN", ""))
BASE_URL = 'https://api.github.com'
|
4267ef9e8fc555a460b53fcfdee0f048bbdb84cf
|
accounts/tests/test_views.py
|
accounts/tests/test_views.py
|
"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
|
"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
def test_get_request_yields_405(self):
"""Accessing the view via get request is not allowed.
"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 405)
|
Add test for denying get requests
|
Add test for denying get requests
|
Python
|
mit
|
randomic/aniauth-tdd,randomic/aniauth-tdd
|
"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
Add test for denying get requests
|
"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
def test_get_request_yields_405(self):
"""Accessing the view via get request is not allowed.
"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 405)
|
<commit_before>"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
<commit_msg>Add test for denying get requests<commit_after>
|
"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
def test_get_request_yields_405(self):
"""Accessing the view via get request is not allowed.
"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 405)
|
"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
Add test for denying get requests"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
def test_get_request_yields_405(self):
"""Accessing the view via get request is not allowed.
"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 405)
|
<commit_before>"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
<commit_msg>Add test for denying get requests<commit_after>"""accounts app unittests for views
"""
from django.test import TestCase
from django.urls import reverse
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should respond with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class SendLoginEmailTest(TestCase):
"""Tests for the view which sends the login email.
"""
def setUp(self):
self.url = reverse('send_login_email')
self.test_email = 'newvisitor@example.com'
def test_uses_emailsent_template(self):
"""The send_login_email url responds with login_email_sent template.
"""
response = self.client.post(self.url, data={'email': self.test_email})
self.assertTemplateUsed(response, 'accounts/login_email_sent.html')
def test_get_request_yields_405(self):
"""Accessing the view via get request is not allowed.
"""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 405)
|
8b7df2f297fde16525821a14755c870c290850af
|
salt/thorium/runner.py
|
salt/thorium/runner.py
|
# -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
# -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
Fix local opts from CLI
|
Fix local opts from CLI
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
Fix local opts from CLI
|
# -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
<commit_before># -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
<commit_msg>Fix local opts from CLI<commit_after>
|
# -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
# -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
Fix local opts from CLI# -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
<commit_before># -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['async'] = True # ensure this will be run async
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
<commit_msg>Fix local opts from CLI<commit_after># -*- coding: utf-8 -*-
'''
React by calling async runners
'''
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
# import salt libs
import salt.runner
def cmd(
name,
func=None,
arg=(),
**kwargs):
'''
Execute a runner asynchronous:
USAGE:
.. code-block:: yaml
run_cloud:
runner.cmd:
- func: cloud.create
- arg:
- my-ec2-config
- myinstance
run_cloud:
runner.cmd:
- func: cloud.create
- kwargs:
provider: my-ec2-config
instances: myinstance
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if func is None:
func = name
local_opts = {}
local_opts.update(__opts__)
local_opts['asynchronous'] = True # ensure this will be run asynchronous
local_opts.update({
'fun': func,
'arg': arg,
'kwarg': kwargs
})
runner = salt.runner.Runner(local_opts)
runner.run()
return ret
|
945195071418762de447dfdb8a73c386f3796e96
|
backend/unichat/models/user.py
|
backend/unichat/models/user.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
|
Add password field to User model
|
Add password field to User model
|
Python
|
mit
|
dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
Add password field to User model
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
<commit_msg>Add password field to User model<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
Add password field to User model# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
<commit_msg>Add password field to User model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
|
9d339fa198738765a3f3a1a0202b2082c1d9919c
|
settings/dev.sample.py
|
settings/dev.sample.py
|
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
|
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
# E-Mail Settings
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
Add console as email backend
|
fix(settings): Add console as email backend
|
Python
|
mit
|
DESHRAJ/wye,shankig/wye,shankig/wye,DESHRAJ/wye,DESHRAJ/wye,pythonindia/wye,harisibrahimkv/wye,pythonindia/wye,DESHRAJ/wye,harisibrahimkv/wye,shankisg/wye,shankisg/wye,shankisg/wye,pythonindia/wye,shankisg/wye,shankig/wye,harisibrahimkv/wye,shankig/wye,pythonindia/wye,harisibrahimkv/wye
|
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
fix(settings): Add console as email backend
|
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
# E-Mail Settings
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
<commit_before># Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
<commit_msg>fix(settings): Add console as email backend<commit_after>
|
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
# E-Mail Settings
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
fix(settings): Add console as email backend# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
# E-Mail Settings
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
<commit_before># Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
<commit_msg>fix(settings): Add console as email backend<commit_after># Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': "wye",
'USER': "",
'PASSWORD': "",
'HOST': "localhost",
'PORT': "5432",
}
}
# E-Mail Settings
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
f8ea4266082fba1210be270d6ae7607717591978
|
skimage/io/__init__.py
|
skimage/io/__init__.py
|
"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p)) for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
wrap_len = 73
col_2_len = wrap_len - 1 - col_1_len
# Insert table header
info.insert(0, ('=' * col_1_len, {'description': '=' * col_2_len}))
info.insert(1, ('Plugin', {'description': 'Description'}))
info.insert(2, ('-' * col_1_len, {'description': '-' * col_2_len}))
info.append(('=' * col_1_len, {'description': '=' * col_2_len}))
for (name, meta_data) in info:
wrapped_descr = wrap(meta_data.get('description', ''),
col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len),
'\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
|
"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
WRAP_LEN = 73
def _separator(char, lengths):
return [char * separator_length for separator_length in lengths]
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p).get('description', 'no description'))
for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
col_2_len = WRAP_LEN - 1 - col_1_len
# Insert table header
info.insert(0, _separator('=', (col_1_len, col_2_len)))
info.insert(1, ('Plugin', 'Description'))
info.insert(2, _separator('-', (col_1_len, col_2_len)))
info.append(_separator('-', (col_1_len, col_2_len)))
for (name, plugin_description) in info:
wrapped_descr = wrap(plugin_description, col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len), '\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
|
Refactor io doc building code
|
Refactor io doc building code
|
Python
|
bsd-3-clause
|
youprofit/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,ofgulban/scikit-image,pratapvardhan/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,blink1073/scikit-image,SamHames/scikit-image,bsipocz/scikit-image,chriscrosscutler/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,Midafi/scikit-image,juliusbierk/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,emon10005/scikit-image,paalge/scikit-image,paalge/scikit-image,paalge/scikit-image,WarrenWeckesser/scikits-image,michaelaye/scikit-image,michaelpacer/scikit-image,ClinicalGraphics/scikit-image,emon10005/scikit-image,bennlich/scikit-image,vighneshbirodkar/scikit-image,GaZ3ll3/scikit-image,GaZ3ll3/scikit-image,ofgulban/scikit-image,oew1v07/scikit-image,keflavich/scikit-image,robintw/scikit-image,bennlich/scikit-image,newville/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,youprofit/scikit-image,jwiggins/scikit-image,SamHames/scikit-image,ajaybhat/scikit-image,Hiyorimi/scikit-image,bsipocz/scikit-image,ClinicalGraphics/scikit-image,blink1073/scikit-image,keflavich/scikit-image,Britefury/scikit-image,chriscrosscutler/scikit-image,Midafi/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,robintw/scikit-image,newville/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,SamHames/scikit-image,chintak/scikit-image,jwiggins/scikit-image,Britefury/scikit-image,oew1v07/scikit-image,SamHames/scikit-image,warmspringwinds/scikit-image,michaelaye/scikit-image,pratapvardhan/scikit-image,rjeli/scikit-image,rjeli/scikit-image,dpshelio/scikit-image
|
"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p)) for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
wrap_len = 73
col_2_len = wrap_len - 1 - col_1_len
# Insert table header
info.insert(0, ('=' * col_1_len, {'description': '=' * col_2_len}))
info.insert(1, ('Plugin', {'description': 'Description'}))
info.insert(2, ('-' * col_1_len, {'description': '-' * col_2_len}))
info.append(('=' * col_1_len, {'description': '=' * col_2_len}))
for (name, meta_data) in info:
wrapped_descr = wrap(meta_data.get('description', ''),
col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len),
'\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
Refactor io doc building code
|
"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
WRAP_LEN = 73
def _separator(char, lengths):
return [char * separator_length for separator_length in lengths]
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p).get('description', 'no description'))
for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
col_2_len = WRAP_LEN - 1 - col_1_len
# Insert table header
info.insert(0, _separator('=', (col_1_len, col_2_len)))
info.insert(1, ('Plugin', 'Description'))
info.insert(2, _separator('-', (col_1_len, col_2_len)))
info.append(_separator('-', (col_1_len, col_2_len)))
for (name, plugin_description) in info:
wrapped_descr = wrap(plugin_description, col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len), '\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
|
<commit_before>"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
def _update_doc(doc):
    """Add a list of plugins to the module docstring, formatted as
    a ReStructuredText table.
    """
    from textwrap import wrap
    # plugin_info(p) returns a metadata dict; the 'test' plugin is internal
    # and excluded from the documentation.
    info = [(p, plugin_info(p)) for p in available_plugins if not p == 'test']
    # First column is sized to the longest plugin name; the description
    # column takes the remainder of the wrap width.
    col_1_len = max([len(n) for (n, _) in info])
    wrap_len = 73
    col_2_len = wrap_len - 1 - col_1_len
    # Insert table header
    info.insert(0, ('=' * col_1_len, {'description': '=' * col_2_len}))
    info.insert(1, ('Plugin', {'description': 'Description'}))
    info.insert(2, ('-' * col_1_len, {'description': '-' * col_2_len}))
    # Closing '=' rule of the ReST simple table.
    info.append(('=' * col_1_len, {'description': '=' * col_2_len}))
    for (name, meta_data) in info:
        wrapped_descr = wrap(meta_data.get('description', ''),
                             col_2_len)
        doc += "%s %s\n" % (name.ljust(col_1_len),
                            '\n'.join(wrapped_descr))
    doc = doc.strip()
    return doc
__doc__ = _update_doc(__doc__)
<commit_msg>Refactor io doc building code<commit_after>
|
"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
WRAP_LEN = 73
def _separator(char, lengths):
return [char * separator_length for separator_length in lengths]
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p).get('description', 'no description'))
for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
col_2_len = WRAP_LEN - 1 - col_1_len
# Insert table header
info.insert(0, _separator('=', (col_1_len, col_2_len)))
info.insert(1, ('Plugin', 'Description'))
info.insert(2, _separator('-', (col_1_len, col_2_len)))
info.append(_separator('-', (col_1_len, col_2_len)))
for (name, plugin_description) in info:
wrapped_descr = wrap(plugin_description, col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len), '\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
|
"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p)) for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
wrap_len = 73
col_2_len = wrap_len - 1 - col_1_len
# Insert table header
info.insert(0, ('=' * col_1_len, {'description': '=' * col_2_len}))
info.insert(1, ('Plugin', {'description': 'Description'}))
info.insert(2, ('-' * col_1_len, {'description': '-' * col_2_len}))
info.append(('=' * col_1_len, {'description': '=' * col_2_len}))
for (name, meta_data) in info:
wrapped_descr = wrap(meta_data.get('description', ''),
col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len),
'\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
Refactor io doc building code"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
WRAP_LEN = 73
def _separator(char, lengths):
return [char * separator_length for separator_length in lengths]
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p).get('description', 'no description'))
for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
col_2_len = WRAP_LEN - 1 - col_1_len
# Insert table header
info.insert(0, _separator('=', (col_1_len, col_2_len)))
info.insert(1, ('Plugin', 'Description'))
info.insert(2, _separator('-', (col_1_len, col_2_len)))
info.append(_separator('-', (col_1_len, col_2_len)))
for (name, plugin_description) in info:
wrapped_descr = wrap(plugin_description, col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len), '\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
|
<commit_before>"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p)) for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
wrap_len = 73
col_2_len = wrap_len - 1 - col_1_len
# Insert table header
info.insert(0, ('=' * col_1_len, {'description': '=' * col_2_len}))
info.insert(1, ('Plugin', {'description': 'Description'}))
info.insert(2, ('-' * col_1_len, {'description': '-' * col_2_len}))
info.append(('=' * col_1_len, {'description': '=' * col_2_len}))
for (name, meta_data) in info:
wrapped_descr = wrap(meta_data.get('description', ''),
col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len),
'\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
<commit_msg>Refactor io doc building code<commit_after>"""Utilities to read and write images in various formats.
The following plug-ins are available:
"""
from ._plugins import *
from .sift import *
from .collection import *
from ._io import *
from ._image_stack import *
from .video import *
reset_plugins()
WRAP_LEN = 73
def _separator(char, lengths):
return [char * separator_length for separator_length in lengths]
def _update_doc(doc):
"""Add a list of plugins to the module docstring, formatted as
a ReStructuredText table.
"""
from textwrap import wrap
info = [(p, plugin_info(p).get('description', 'no description'))
for p in available_plugins if not p == 'test']
col_1_len = max([len(n) for (n, _) in info])
col_2_len = WRAP_LEN - 1 - col_1_len
# Insert table header
info.insert(0, _separator('=', (col_1_len, col_2_len)))
info.insert(1, ('Plugin', 'Description'))
info.insert(2, _separator('-', (col_1_len, col_2_len)))
info.append(_separator('-', (col_1_len, col_2_len)))
for (name, plugin_description) in info:
wrapped_descr = wrap(plugin_description, col_2_len)
doc += "%s %s\n" % (name.ljust(col_1_len), '\n'.join(wrapped_descr))
doc = doc.strip()
return doc
__doc__ = _update_doc(__doc__)
|
e31790412c9e869841b448f3e7f8bb4a965da81d
|
mygpo/web/templatetags/devices.py
|
mygpo/web/templatetags/devices.py
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
    """Return an ``<img>`` tag for *device*'s type icon, or ``''`` if unknown.

    *device* is a device model instance; icon file and caption are looked
    up by its ``type`` field in DEVICE_TYPE_ICONS / DEVICE_TYPES_DICT.
    """
    # BUG FIX: the lookups previously used the device object itself as the
    # dict key instead of device.type, so no icon was ever found.
    icon = DEVICE_TYPE_ICONS.get(device.type, None)
    caption = DEVICE_TYPES_DICT.get(device.type, None)
    if icon is not None and caption is not None:
        # %-interpolation via locals() picks up size, icon and caption.
        html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
                'alt="%(caption)s" class="device_icon"/>') % locals()
        return mark_safe(html)
    return ''
@register.filter
def device_list(devices):
    """Render *devices* as a comma-separated list of linked device names."""
    links = []
    for device in devices:
        links.append('<a href="/device/%s">%s %s</a>'
                     % (device.id, device_icon(device), device.name))
    return mark_safe(', '.join(links))
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device.type, None)
caption = DEVICE_TYPES_DICT.get(device.type, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
Fix problem with device icons
|
Fix problem with device icons
|
Python
|
agpl-3.0
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device, None)
caption = DEVICE_TYPES_DICT.get(device, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
Fix problem with device icons
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device.type, None)
caption = DEVICE_TYPES_DICT.get(device.type, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
<commit_before>from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device, None)
caption = DEVICE_TYPES_DICT.get(device, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
<commit_msg>Fix problem with device icons<commit_after>
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device.type, None)
caption = DEVICE_TYPES_DICT.get(device.type, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device, None)
caption = DEVICE_TYPES_DICT.get(device, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
Fix problem with device iconsfrom django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device.type, None)
caption = DEVICE_TYPES_DICT.get(device.type, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
<commit_before>from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device, None)
caption = DEVICE_TYPES_DICT.get(device, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
<commit_msg>Fix problem with device icons<commit_after>from django import template
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from mygpo.api.models import DEVICE_TYPES
register = template.Library()
# Create a dictionary of device_type -> caption mappings
DEVICE_TYPES_DICT = dict(DEVICE_TYPES)
# This dictionary maps device types to their icon files
DEVICE_TYPE_ICONS = {
'desktop': 'computer.png',
'laptop': 'stock_notebook.png',
'mobile': 'stock_cell-phone.png',
'server': 'server.png',
'other': 'audio-x-generic.png',
}
@register.filter
def device_icon(device, size=16):
icon = DEVICE_TYPE_ICONS.get(device.type, None)
caption = DEVICE_TYPES_DICT.get(device.type, None)
if icon is not None and caption is not None:
html = ('<img src="/media/%(size)dx%(size)d/%(icon)s" '+
'alt="%(caption)s" class="device_icon"/>') % locals()
return mark_safe(html)
return ''
@register.filter
def device_list(devices):
return mark_safe(', '.join([ '<a href="/device/%s">%s %s</a>' % (d.id, device_icon(d), d.name) for d in devices]))
|
362d60b4ab982efa96a0ef255f5de97b80c0b569
|
skan/test/test_pipe.py
|
skan/test/test_pipe.py
|
import os
import pytest
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
|
import os
import pytest
import tempfile
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
    """Absolute path of the bundled test image ``data/retic.tif``."""
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, 'data', 'retic.tif')
def test_pipe(image_filename):
    """process_images should return a non-empty DataFrame for the image."""
    result = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
                                 'Scan/PixelHeight')
    assert type(result) == pandas.DataFrame
    assert result.shape[0] > 0
def test_pipe_figure(image_filename):
    """Saving skeleton plots should write one PNG into the output folder."""
    with tempfile.TemporaryDirectory() as tempdir:
        data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
                                   'Scan/PixelHeight',
                                   save_skeleton='skeleton-plot-',
                                   output_folder=tempdir)
        assert type(data) == pandas.DataFrame
        assert data.shape[0] > 0
        # BUG FIX: image_filename is an absolute path, so joining it into
        # tempdir previously produced a path outside the temp directory.
        # The plot is named after the image's base name (sans extension) --
        # presumably matching pipe's own naming; confirm against pipe.
        base = os.path.basename(image_filename)[:-4]
        assert os.path.exists(os.path.join(tempdir,
                                           'skeleton-plot-' + base + '.png'))
|
Add test for saving skeleton image
|
Add test for saving skeleton image
|
Python
|
bsd-3-clause
|
jni/skan
|
import os
import pytest
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
Add test for saving skeleton image
|
import os
import pytest
import tempfile
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
def test_pipe_figure(image_filename):
with tempfile.TemporaryDirectory() as tempdir:
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight',
save_skeleton='skeleton-plot-',
output_folder=tempdir)
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
assert os.path.exists(os.path.join(tempdir,
'skeleton-plot-' +
image_filename[:-4] +
'.png'))
|
<commit_before>import os
import pytest
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
<commit_msg>Add test for saving skeleton image<commit_after>
|
import os
import pytest
import tempfile
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
def test_pipe_figure(image_filename):
with tempfile.TemporaryDirectory() as tempdir:
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight',
save_skeleton='skeleton-plot-',
output_folder=tempdir)
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
assert os.path.exists(os.path.join(tempdir,
'skeleton-plot-' +
image_filename[:-4] +
'.png'))
|
import os
import pytest
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
Add test for saving skeleton imageimport os
import pytest
import tempfile
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
def test_pipe_figure(image_filename):
with tempfile.TemporaryDirectory() as tempdir:
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight',
save_skeleton='skeleton-plot-',
output_folder=tempdir)
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
assert os.path.exists(os.path.join(tempdir,
'skeleton-plot-' +
image_filename[:-4] +
'.png'))
|
<commit_before>import os
import pytest
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
<commit_msg>Add test for saving skeleton image<commit_after>import os
import pytest
import tempfile
import pandas
from skan import pipe
@pytest.fixture
def image_filename():
rundir = os.path.abspath(os.path.dirname(__file__))
datadir = os.path.join(rundir, 'data')
return os.path.join(datadir, 'retic.tif')
def test_pipe(image_filename):
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight')
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
def test_pipe_figure(image_filename):
with tempfile.TemporaryDirectory() as tempdir:
data = pipe.process_images([image_filename], 'fei', 5e-8, 0.1, 0.075,
'Scan/PixelHeight',
save_skeleton='skeleton-plot-',
output_folder=tempdir)
assert type(data) == pandas.DataFrame
assert data.shape[0] > 0
assert os.path.exists(os.path.join(tempdir,
'skeleton-plot-' +
image_filename[:-4] +
'.png'))
|
ada3d309541daaa8591a6bcb6ec42a2a2ff468db
|
catsnap/worker/tasks.py
|
catsnap/worker/tasks.py
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
    """Worker task that invalidates a cached file on CloudFront."""

    def run(self, filename):
        """Request a CloudFront invalidation for *filename*.

        Does nothing when no distribution id is configured; retries when
        CloudFront reports too many invalidations in progress; re-raises
        any other CloudFront error.
        """
        config = Client().config()
        try:
            distro_id = config['cloudfront_distribution_id']
            Client().get_cloudfront().create_invalidation_request(
                distro_id, filename)
        except KeyError:
            # No distribution configured -- invalidation is a no-op.
            pass
        except CloudFrontServerError as e:
            # BUG FIX: removed a stray debug print statement here.
            if e.error_code == 'TooManyInvalidationsInProgress':
                self.retry(e)
            else:
                raise
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
self.retry(e)
else:
raise
|
Remove a line of debug output
|
Remove a line of debug output
|
Python
|
mit
|
ErinCall/catsnap,ErinCall/catsnap,ErinCall/catsnap
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
print 'yeah hi'
self.retry(e)
else:
raise
Remove a line of debug output
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
self.retry(e)
else:
raise
|
<commit_before>from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
print 'yeah hi'
self.retry(e)
else:
raise
<commit_msg>Remove a line of debug output<commit_after>
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
self.retry(e)
else:
raise
|
from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
print 'yeah hi'
self.retry(e)
else:
raise
Remove a line of debug outputfrom __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
self.retry(e)
else:
raise
|
<commit_before>from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
print 'yeah hi'
self.retry(e)
else:
raise
<commit_msg>Remove a line of debug output<commit_after>from __future__ import unicode_literals, absolute_import
from boto.cloudfront.exception import CloudFrontServerError
from catsnap.worker import worker
from catsnap import Client
class Invalidate(worker.Task):
def run(self, filename):
config = Client().config()
try:
distro_id = config['cloudfront_distribution_id']
Client().get_cloudfront().create_invalidation_request(
distro_id, filename)
except KeyError:
pass
except CloudFrontServerError as e:
if e.error_code == 'TooManyInvalidationsInProgress':
self.retry(e)
else:
raise
|
cc71d25b85d991801f2dc980ad37aa7bdbe7e2f3
|
ocradmin/lib/nodetree/registry.py
|
ocradmin/lib/nodetree/registry.py
|
"""
Registry class and global node registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
|
"""
Registry class and global node registry.
This class was adapted from the Celery Project's task registry.
"""
import inspect
class NotRegistered(KeyError):
    """Raised when looking up or removing a node that was never registered."""
    pass


class NodeRegistry(dict):
    """Mapping of node name -> node class.

    Adapted from the Celery project's task registry.
    """
    NotRegistered = NotRegistered

    def register(self, node):
        """Register a node class (or an instance's class) under ``node.name``."""
        self[node.name] = inspect.isclass(node) and node or node.__class__

    def unregister(self, name):
        """Unregister node by name; a node class is also accepted."""
        try:
            # Might be a node class
            name = name.name
        except AttributeError:
            pass
        self.pop(name)

    def get_by_attr(self, attr, value=None):
        """Return all nodes that have a matching attribute ``attr``.

        If ``value`` is given, only return nodes where the attribute's
        value equals ``value``.
        """
        ret = {}
        for name, node in self.items():
            # BUG FIX: the value-matching clause previously tested
            # ``getattr(node, name)`` instead of ``getattr(node, attr)``,
            # so value-filtered lookups never matched.
            if hasattr(node, attr) and (value is None or
                                        getattr(node, attr) == value):
                ret[name] = node
        return ret

    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            raise self.NotRegistered(key)

    def pop(self, key, *args):
        try:
            return dict.pop(self, key, *args)
        except KeyError:
            raise self.NotRegistered(key)


nodes = NodeRegistry()
|
Add a note acknowledging the source of this class
|
Add a note acknowledging the source of this class
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
"""
Registry class and global node registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
Add a note acknowledging the source of this class
|
"""
Registry class and global node registry.
This class was adapted from the Celery Project's task registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
|
<commit_before>"""
Registry class and global node registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
<commit_msg>Add a note acknowledging the source of this class<commit_after>
|
"""
Registry class and global node registry.
This class was adapted from the Celery Project's task registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
|
"""
Registry class and global node registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
Add a note acknowledging the source of this class"""
Registry class and global node registry.
This class was adapted from the Celery Project's task registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
|
<commit_before>"""
Registry class and global node registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
<commit_msg>Add a note acknowledging the source of this class<commit_after>"""
Registry class and global node registry.
This class was adapted from the Celery Project's task registry.
"""
import inspect
class NotRegistered(KeyError):
pass
class NodeRegistry(dict):
NotRegistered = NotRegistered
def register(self, node):
"""Register a node class in the node registry."""
self[node.name] = inspect.isclass(node) and node or node.__class__
def unregister(self, name):
"""Unregister node by name."""
try:
# Might be a node class
name = name.name
except AttributeError:
pass
self.pop(name)
def get_by_attr(self, attr, value=None):
"""Return all nodes of a specific type that have a matching attr.
If `value` is given, only return nodes where the attr value matches."""
ret = {}
for name, node in self.iteritems():
if hasattr(node, attr) and value is None\
or hasattr(node, name) and getattr(node, name) == value:
ret[name] = node
return ret
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
raise self.NotRegistered(key)
def pop(self, key, *args):
try:
return dict.pop(self, key, *args)
except KeyError:
raise self.NotRegistered(key)
nodes = NodeRegistry()
|
58102d86c5c6165cc32f3f4f303cca2b0bc516f7
|
dimod/package_info.py
|
dimod/package_info.py
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
Update version 0.7.0 -> 0.7.1
|
Update version 0.7.0 -> 0.7.1
|
Python
|
apache-2.0
|
dwavesystems/dimod,dwavesystems/dimod
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.7.0 -> 0.7.1
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
<commit_before># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 0.7.0 -> 0.7.1<commit_after>
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
Update version 0.7.0 -> 0.7.1# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
<commit_before># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 0.7.0 -> 0.7.1<commit_after># Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
|
279a7dfcdd854999d490164da3dc3790430e639a
|
membership/management/commands/public_memberlist.py
|
membership/management/commands/public_memberlist.py
|
# -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data).encode('utf-8')
|
# -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data)
|
Fix UnicodeDecodeError: Return text string, not bytes
|
Fix UnicodeDecodeError: Return text string, not bytes
|
Python
|
mit
|
kapsiry/sikteeri,AriMartti/sikteeri,kapsiry/sikteeri,kapsiry/sikteeri,annttu/sikteeri,joneskoo/sikteeri,annttu/sikteeri,AriMartti/sikteeri,kapsiry/sikteeri,joneskoo/sikteeri,AriMartti/sikteeri,annttu/sikteeri,joneskoo/sikteeri,annttu/sikteeri,AriMartti/sikteeri,joneskoo/sikteeri
|
# -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data).encode('utf-8')
Fix UnicodeDecodeError: Return text string, not bytes
|
# -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data)
|
<commit_before># -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data).encode('utf-8')
<commit_msg>Fix UnicodeDecodeError: Return text string, not bytes<commit_after>
|
# -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data)
|
# -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data).encode('utf-8')
Fix UnicodeDecodeError: Return text string, not bytes# -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data)
|
<commit_before># -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data).encode('utf-8')
<commit_msg>Fix UnicodeDecodeError: Return text string, not bytes<commit_after># -*- encoding: utf-8 -*-
from django.db.models import Q
from django.core.management.base import NoArgsCommand
from django.template.loader import render_to_string
from django.conf import settings
from membership.models import *
from membership.public_memberlist import public_memberlist_data
class Command(NoArgsCommand):
def handle_noargs(self, **options):
template_name = 'membership/public_memberlist.xml'
data = public_memberlist_data()
return render_to_string(template_name, data)
|
763939db37e3b9f93f1201aada4e893bbe478249
|
17-createWallOfWoolWithRandomColour.py
|
17-createWallOfWoolWithRandomColour.py
|
#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import *
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
#Generate a random number within the allowed range of colours
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
print("Creating block at", blockXposn, blockYposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, WOOL.withData(randomNumber))
sleep(0.5)
|
#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import WOOL
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
# create a function to create a random block of wool
def getWoolBlockWithRandomColour():
#Generate a random number within the allowed range of colours (0 to 15 inclusive)
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
block = WOOL.withData(randomNumber)
return block
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
|
Change to use function for block generation
|
Change to use function for block generation
Use a separate function for random block generation
move code from loop to the function
|
Python
|
bsd-3-clause
|
hashbangstudio/Python-Minecraft-Examples
|
#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import *
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
#Generate a random number within the allowed range of colours
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
print("Creating block at", blockXposn, blockYposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, WOOL.withData(randomNumber))
sleep(0.5)
Change to use function for block generation
Use a separate function for random block generation
move code from loop to the function
|
#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import WOOL
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
# create a function to create a random block of wool
def getWoolBlockWithRandomColour():
#Generate a random number within the allowed range of colours (0 to 15 inclusive)
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
block = WOOL.withData(randomNumber)
return block
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
|
<commit_before>#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import *
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
#Generate a random number within the allowed range of colours
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
print("Creating block at", blockXposn, blockYposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, WOOL.withData(randomNumber))
sleep(0.5)
<commit_msg>Change to use function for block generation
Use a separate function for random block generation
move code from loop to the function<commit_after>
|
#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import WOOL
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
# create a function to create a random block of wool
def getWoolBlockWithRandomColour():
#Generate a random number within the allowed range of colours (0 to 15 inclusive)
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
block = WOOL.withData(randomNumber)
return block
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
|
#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import *
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
#Generate a random number within the allowed range of colours
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
print("Creating block at", blockXposn, blockYposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, WOOL.withData(randomNumber))
sleep(0.5)
Change to use function for block generation
Use a separate function for random block generation
move code from loop to the function#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import WOOL
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
# create a function to create a random block of wool
def getWoolBlockWithRandomColour():
#Generate a random number within the allowed range of colours (0 to 15 inclusive)
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
block = WOOL.withData(randomNumber)
return block
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
|
<commit_before>#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import *
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
#Generate a random number within the allowed range of colours
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
print("Creating block at", blockXposn, blockYposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, WOOL.withData(randomNumber))
sleep(0.5)
<commit_msg>Change to use function for block generation
Use a separate function for random block generation
move code from loop to the function<commit_after>#import the needed modules fo communication with minecraft world
from mcpi.minecraft import *
# import needed block defintiions
from mcpi.block import WOOL
# needed to slow down the wall building
from time import sleep
# needed to generate a random number for the colour of wool
from random import randint
# create a function to create a random block of wool
def getWoolBlockWithRandomColour():
#Generate a random number within the allowed range of colours (0 to 15 inclusive)
randomNumber = randint(0,15)
print("random number to be used = "+ str(randomNumber))
block = WOOL.withData(randomNumber)
return block
if __name__ == "__main__":
# Create a connection to the Minecraft game
mc = Minecraft.create()
# Get the player position
playerPosition = mc.player.getTilePos()
# define the position of the bottom left block of the wall
blockXposn = playerPosition.x + 6
firstColumnX = blockXposn
blockYposn = playerPosition.y + 1
blockZposn = playerPosition.z + 6
# Create a wall using nested for loops
for row in range(6):
# increase the height of th current row to be built
blockYposn += 1
blockXposn = firstColumnX
for column in range(10):
#increase the distance along the row that the block is placed at
blockXposn += 1
print("Creating block at", blockXposn, blockYposn, blockZposn)
# Create a block
mc.setBlock(blockXposn, blockYposn, blockZposn, getWoolBlockWithRandomColour())
sleep(0.5)
|
1b95969110f97af397cb3314b59c30679911da48
|
scripts/scrape-cdc-state-case-counts.py
|
scripts/scrape-cdc-state-case-counts.py
|
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ td.text_content().strip()
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import re
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
paren_pat = re.compile(r"\([^\)]+\)")
def parse_cell(text):
return re.sub(paren_pat, "", text).strip()
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
Update CDC scraper to handle new format
|
Update CDC scraper to handle new format
|
Python
|
mit
|
BuzzFeedNews/zika-data
|
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ td.text_content().strip()
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
Update CDC scraper to handle new format
|
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import re
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
paren_pat = re.compile(r"\([^\)]+\)")
def parse_cell(text):
return re.sub(paren_pat, "", text).strip()
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
<commit_before>#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ td.text_content().strip()
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
<commit_msg>Update CDC scraper to handle new format<commit_after>
|
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import re
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
paren_pat = re.compile(r"\([^\)]+\)")
def parse_cell(text):
return re.sub(paren_pat, "", text).strip()
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ td.text_content().strip()
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
Update CDC scraper to handle new format#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import re
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
paren_pat = re.compile(r"\([^\)]+\)")
def parse_cell(text):
return re.sub(paren_pat, "", text).strip()
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
<commit_before>#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ td.text_content().strip()
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
<commit_msg>Update CDC scraper to handle new format<commit_after>#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import re
import sys
URL = "http://www.cdc.gov/zika/geo/united-states.html"
INT_COLS = [ "travel_associated_cases", "locally_acquired_cases" ]
COLS = [ "state_or_territory" ] + INT_COLS
paren_pat = re.compile(r"\([^\)]+\)")
def parse_cell(text):
return re.sub(paren_pat, "", text).strip()
def scrape():
html = requests.get(URL).content
dom = lxml.html.fromstring(html)
table = dom.cssselect("table")[0]
rows = table.cssselect("tr")
cells = [ [ parse_cell(td.text_content())
for td in tr.cssselect("td") ]
for tr in rows ]
data = [ c for c in cells
if sum(len(x) != 0 for x in c) == 3 ]
df = pd.DataFrame(data, columns=COLS)
df[INT_COLS] = df[INT_COLS].astype(int)
return df
if __name__ == "__main__":
df = scrape()
df.to_csv(sys.stdout, index=False, encoding="utf-8")
|
697d30430fa908c6e2baf88285f0a464993d6636
|
formapi/compat.py
|
formapi/compat.py
|
# coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
|
# coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u
try:
from django.utils.encoding import smart_text as smart_u
except:
# Django 1.3
from django.utils.encoding import smart_unicode as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
|
Fix smart_u for Django 1.3
|
Fix smart_u for Django 1.3
|
Python
|
mit
|
5monkeys/django-formapi,andreif/django-formapi,5monkeys/django-formapi,andreif/django-formapi
|
# coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
Fix smart_u for Django 1.3
|
# coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u
try:
from django.utils.encoding import smart_text as smart_u
except:
# Django 1.3
from django.utils.encoding import smart_unicode as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
|
<commit_before># coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
<commit_msg>Fix smart_u for Django 1.3<commit_after>
|
# coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u
try:
from django.utils.encoding import smart_text as smart_u
except:
# Django 1.3
from django.utils.encoding import smart_unicode as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
|
# coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
Fix smart_u for Django 1.3# coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u
try:
from django.utils.encoding import smart_text as smart_u
except:
# Django 1.3
from django.utils.encoding import smart_unicode as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
|
<commit_before># coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
<commit_msg>Fix smart_u for Django 1.3<commit_after># coding=utf-8
# flake8: noqa
import sys
if sys.version_info[0] == 3:
from django.utils.encoding import smart_bytes as smart_b, force_str as force_u, smart_text as smart_u
# noinspection PyUnresolvedReferences
from urllib.parse import quote
ifilter = filter
b_str = bytes
u_str = str
iteritems = lambda dic: dic.items()
else:
from django.utils.encoding import smart_str as smart_b, force_unicode as force_u
try:
from django.utils.encoding import smart_text as smart_u
except:
# Django 1.3
from django.utils.encoding import smart_unicode as smart_u
# noinspection PyUnresolvedReferences
from urllib2 import quote
# noinspection PyUnresolvedReferences
from itertools import ifilter
b_str = str
# noinspection PyUnresolvedReferences
u_str = unicode
iteritems = lambda dic: dic.iteritems()
try:
from django.conf.urls import patterns, url, include
except ImportError:
# noinspection PyUnresolvedReferences
from django.conf.urls.defaults import patterns, url, include
# Calm down unused import warnings:
assert [smart_b, smart_u, force_u, quote, ifilter]
|
d3e13351c514581a5460097624f82aa696398f78
|
iamhhb/views.py
|
iamhhb/views.py
|
from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
|
from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku', 'Travis-CI'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
|
Add travis-ci to `powerd by` page.
|
Add travis-ci to `powerd by` page.
|
Python
|
mit
|
graycarl/iamhhb,graycarl/iamhhb,graycarl/iamhhb,graycarl/iamhhb
|
from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
Add travis-ci to `powerd by` page.
|
from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku', 'Travis-CI'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
|
<commit_before>from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
<commit_msg>Add travis-ci to `powerd by` page.<commit_after>
|
from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku', 'Travis-CI'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
|
from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
Add travis-ci to `powerd by` page.from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku', 'Travis-CI'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
|
<commit_before>from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
<commit_msg>Add travis-ci to `powerd by` page.<commit_after>from django.shortcuts import render
from django.contrib import admin, auth
# We don't need a user management
admin.site.unregister(auth.models.User)
admin.site.unregister(auth.models.Group)
def index(request):
return render(request, 'index.html')
def about_me(request):
return render(request, 'about-me.html')
def a_lot_tech(request):
techs = {
'Language': ['Python', 'HTML', 'JavaScript', 'Sass'],
'Framework': ['Django', 'Semantic UI'],
'Package Manager': ['PyPI', 'NPM', 'Bower'],
'Platform': ['GitHub', 'Heroku', 'Travis-CI'],
'Database': ['PostgreSQL', 'SQLite']
}
return render(request, 'a-lot.html', locals())
|
b011ccf5c4ce5f93c7b02f938385432325012569
|
tt/core/tt.py
|
tt/core/tt.py
|
# Here we import all necessary staff from external files
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# main classes
from .matrix import matrix
from .vector import vector, tensor
# utility
from . import utils
|
# Here we import all necessary staff from external files
# main classes
from .matrix import matrix
from .vector import vector, tensor
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# utility
from . import utils
|
Revert "Import order changed to break tools dependency"
|
Revert "Import order changed to break tools dependency"
This reverts commit 3a75fd530b1ecb9e6466ac99532d06032ae3a049.
|
Python
|
mit
|
uranix/ttpy,uranix/ttpy
|
# Here we import all necessary staff from external files
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# main classes
from .matrix import matrix
from .vector import vector, tensor
# utility
from . import utils
Revert "Import order changed to break tools dependency"
This reverts commit 3a75fd530b1ecb9e6466ac99532d06032ae3a049.
|
# Here we import all necessary staff from external files
# main classes
from .matrix import matrix
from .vector import vector, tensor
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# utility
from . import utils
|
<commit_before># Here we import all necessary staff from external files
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# main classes
from .matrix import matrix
from .vector import vector, tensor
# utility
from . import utils
<commit_msg>Revert "Import order changed to break tools dependency"
This reverts commit 3a75fd530b1ecb9e6466ac99532d06032ae3a049.<commit_after>
|
# Here we import all necessary staff from external files
# main classes
from .matrix import matrix
from .vector import vector, tensor
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# utility
from . import utils
|
# Here we import all necessary staff from external files
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# main classes
from .matrix import matrix
from .vector import vector, tensor
# utility
from . import utils
Revert "Import order changed to break tools dependency"
This reverts commit 3a75fd530b1ecb9e6466ac99532d06032ae3a049.# Here we import all necessary staff from external files
# main classes
from .matrix import matrix
from .vector import vector, tensor
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# utility
from . import utils
|
<commit_before># Here we import all necessary staff from external files
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# main classes
from .matrix import matrix
from .vector import vector, tensor
# utility
from . import utils
<commit_msg>Revert "Import order changed to break tools dependency"
This reverts commit 3a75fd530b1ecb9e6466ac99532d06032ae3a049.<commit_after># Here we import all necessary staff from external files
# main classes
from .matrix import matrix
from .vector import vector, tensor
# tools
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape
from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS
from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun
# utility
from . import utils
|
59fb7b9d1078a7b0199b9613a523f3d2fce80c13
|
pombola_sayit/models.py
|
pombola_sayit/models.py
|
from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^instances\.fields\.DNSLabelField"])
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
|
from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
|
Remove the last reference to South - it can be pip uninstalled now
|
Remove the last reference to South - it can be pip uninstalled now
|
Python
|
agpl-3.0
|
mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola
|
from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^instances\.fields\.DNSLabelField"])
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
Remove the last reference to South - it can be pip uninstalled now
|
from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
|
<commit_before>from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^instances\.fields\.DNSLabelField"])
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
<commit_msg>Remove the last reference to South - it can be pip uninstalled now<commit_after>
|
from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
|
from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^instances\.fields\.DNSLabelField"])
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
Remove the last reference to South - it can be pip uninstalled nowfrom django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
|
<commit_before>from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^instances\.fields\.DNSLabelField"])
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
<commit_msg>Remove the last reference to South - it can be pip uninstalled now<commit_after>from django.db import models
from pombola.core.models import Person
from speeches.models import Speaker
class PombolaSayItJoin(models.Model):
"""This model provides a join table between Pombola and SsayIt people"""
pombola_person = models.OneToOneField(Person, related_name='sayit_link')
sayit_speaker = models.OneToOneField(Speaker, related_name='pombola_link')
|
e73e237a5c712aea9866168c8bb0fb7c56c21d90
|
gpytorch/kernels/white_noise_kernel.py
|
gpytorch/kernels/white_noise_kernel.py
|
import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
|
Add missing py2py3 compatibility imports
|
Add missing py2py3 compatibility imports
|
Python
|
mit
|
jrg365/gpytorch,jrg365/gpytorch,jrg365/gpytorch
|
import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
Add missing py2py3 compatibility imports
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
|
<commit_before>import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
<commit_msg>Add missing py2py3 compatibility imports<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
|
import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
Add missing py2py3 compatibility importsfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
|
<commit_before>import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
<commit_msg>Add missing py2py3 compatibility imports<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import torch
from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable
class WhiteNoiseKernel(Kernel):
def __init__(self, variances):
super(WhiteNoiseKernel, self).__init__()
self.register_buffer("variances", variances)
def forward(self, x1, x2):
if self.training:
return DiagLazyVariable(self.variances.unsqueeze(0))
elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
return DiagLazyVariable(self.variances.unsqueeze(0))
else:
return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2))
|
20d7c4113a96c92f8353761da2c2a00ed7a35e0e
|
gym_ple/__init__.py
|
gym_ple/__init__.py
|
from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
timestep_limit=10000,
nondeterministic=nondeterministic,
)
|
from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
)
|
Replace the timestep_limit call with the new tags api.
|
Replace the timestep_limit call with the new tags api.
|
Python
|
mit
|
lusob/gym-ple
|
from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
timestep_limit=10000,
nondeterministic=nondeterministic,
)
Replace the timestep_limit call with the new tags api.
|
from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
)
|
<commit_before>from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
timestep_limit=10000,
nondeterministic=nondeterministic,
)
<commit_msg>Replace the timestep_limit call with the new tags api.<commit_after>
|
from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
)
|
from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
timestep_limit=10000,
nondeterministic=nondeterministic,
)
Replace the timestep_limit call with the new tags api.from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
)
|
<commit_before>from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
timestep_limit=10000,
nondeterministic=nondeterministic,
)
<commit_msg>Replace the timestep_limit call with the new tags api.<commit_after>from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
)
|
8aceb4bcfeef05874bbd6eec66eeb7b69f20f02e
|
pinax/blog/templatetags/pinax_blog_tags.py
|
pinax/blog/templatetags/pinax_blog_tags.py
|
from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
|
from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
|
Fix small bug in templatetags
|
Fix small bug in templatetags
|
Python
|
mit
|
pinax/pinax-blog,pinax/pinax-blog,pinax/pinax-blog
|
from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
Fix small bug in templatetags
|
from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
|
<commit_before>from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
<commit_msg>Fix small bug in templatetags<commit_after>
|
from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
|
from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
Fix small bug in templatetagsfrom django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
|
<commit_before>from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
<commit_msg>Fix small bug in templatetags<commit_after>from django import template
from ..models import Post, Section
register = template.Library()
@register.assignment_tag
def latest_blog_posts(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[:5]
@register.assignment_tag
def latest_blog_post(scoper=None):
qs = Post.objects.current()
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0]
@register.assignment_tag
def latest_section_post(section, scoper=None):
qs = Post.objects.published().filter(section__name=section).order_by("-published")
if scoper:
qs = qs.filter(blog__scoper=scoper)
return qs[0] if qs.count() > 0 else None
@register.assignment_tag
def blog_sections():
return Section.objects.filter(enabled=True)
|
c2d44c5b25b5827b12f4e4cd653c4f80f7289894
|
hexfile/__init__.py
|
hexfile/__init__.py
|
__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from core import load, HexFile, Segment
|
__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from .core import load, HexFile, Segment
|
Use relative import to fix Python 3.
|
Use relative import to fix Python 3.
|
Python
|
mit
|
ryansturmer/hexfile
|
__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from core import load, HexFile, Segment
Use relative import to fix Python 3.
|
__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from .core import load, HexFile, Segment
|
<commit_before>__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from core import load, HexFile, Segment
<commit_msg>Use relative import to fix Python 3.<commit_after>
|
__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from .core import load, HexFile, Segment
|
__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from core import load, HexFile, Segment
Use relative import to fix Python 3.__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from .core import load, HexFile, Segment
|
<commit_before>__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from core import load, HexFile, Segment
<commit_msg>Use relative import to fix Python 3.<commit_after>__title__ = 'hexfile'
__version__ = '0.1.1'
__author__ = 'Ryan Sturmer'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 Ryan Sturmer'
__docformat__ = 'restructuredtext'
from .core import load, HexFile, Segment
|
ac6c9f4ad35a8c2c8ede616366b50995afff6992
|
hurricane/runner.py
|
hurricane/runner.py
|
#!/usr/bin/env python
import multiprocessing
import optparse
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
|
#!/usr/bin/env python
import multiprocessing
import optparse
from django.conf import settings as django_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
django_settings.configure(settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
|
Configure django correctly when we setup our env
|
Configure django correctly when we setup our env
|
Python
|
bsd-3-clause
|
ericflo/hurricane,ericflo/hurricane
|
#!/usr/bin/env python
import multiprocessing
import optparse
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
Configure django correctly when we setup our env
|
#!/usr/bin/env python
import multiprocessing
import optparse
from django.conf import settings as django_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
django_settings.configure(settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
|
<commit_before>#!/usr/bin/env python
import multiprocessing
import optparse
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
<commit_msg>Configure django correctly when we setup our env<commit_after>
|
#!/usr/bin/env python
import multiprocessing
import optparse
from django.conf import settings as django_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
django_settings.configure(settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
|
#!/usr/bin/env python
import multiprocessing
import optparse
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
Configure django correctly when we setup our env#!/usr/bin/env python
import multiprocessing
import optparse
from django.conf import settings as django_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
django_settings.configure(settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
|
<commit_before>#!/usr/bin/env python
import multiprocessing
import optparse
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
<commit_msg>Configure django correctly when we setup our env<commit_after>#!/usr/bin/env python
import multiprocessing
import optparse
from django.conf import settings as django_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from hurricane.utils import run_until_stopped
class ApplicationManager(object):
@run_until_stopped
def run(self):
parser = optparse.OptionParser()
parser.add_option('--settings', dest='settings')
options, args = parser.parse_args()
if not options.settings:
raise ImproperlyConfigured("You didn't provide a settings module.")
settings = import_module(options.settings)
django_settings.configure(settings)
self.producer_queue = multiprocessing.Queue()
for producer in settings.PRODUCERS:
ProducerClass = import_module(producer).Producer
producer = ProducerClass(settings, self.producer_queue)
multiprocessing.Process(target=producer.run).start()
self.receiver_queues = []
for consumer in settings.CONSUMERS:
ConsumerClass = import_module(consumer).Consumer
recv_queue = multiprocessing.Queue()
consumer = ConsumerClass(settings, recv_queue)
self.receiver_queues.append(recv_queue)
multiprocessing.Process(target=consumer.run).start()
while True:
item = self.producer_queue.get()
for recv_queue in self.receiver_queues:
recv_queue.put(item)
if __name__ == '__main__':
app = ApplicationManager()
app.run()
|
119025b231b0f3b9077445334fc08d1ad076abfc
|
generic_links/migrations/0001_initial.py
|
generic_links/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
|
Remove Django 1.8 dependency in initial migration
|
Remove Django 1.8 dependency in initial migration
The ('contenttypes', '0002_remove_content_type_name') migration was part of Django 1.8, replacing it with '__first__' allows the use of Django 1.7
|
Python
|
bsd-3-clause
|
matagus/django-generic-links,matagus/django-generic-links
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
Remove Django 1.8 dependency in initial migration
The ('contenttypes', '0002_remove_content_type_name') migration was part of Django 1.8, replacing it with '__first__' allows the use of Django 1.7
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
<commit_msg>Remove Django 1.8 dependency in initial migration
The ('contenttypes', '0002_remove_content_type_name') migration was part of Django 1.8, replacing it with '__first__' allows the use of Django 1.7<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
Remove Django 1.8 dependency in initial migration
The ('contenttypes', '0002_remove_content_type_name') migration was part of Django 1.8, replacing it with '__first__' allows the use of Django 1.7# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
<commit_msg>Remove Django 1.8 dependency in initial migration
The ('contenttypes', '0002_remove_content_type_name') migration was part of Django 1.8, replacing it with '__first__' allows the use of Django 1.7<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
|
c7941340336b3fe584dd192583c088eb1f1f972e
|
genomic_neuralnet/common/celeryconfig.py
|
genomic_neuralnet/common/celeryconfig.py
|
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} # Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
|
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 60*60} # 60*60 Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
# Clear out finished results after 30 minutes.
CELERY_TASK_RESULT_EXPIRES = 60*30
|
Reduce result broker message timeout
|
Reduce result broker message timeout
|
Python
|
mit
|
rileymcdowell/genomic-neuralnet,rileymcdowell/genomic-neuralnet,rileymcdowell/genomic-neuralnet
|
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} # Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
Reduce result broker message timeout
|
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 60*60} # 60*60 Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
# Clear out finished results after 30 minutes.
CELERY_TASK_RESULT_EXPIRES = 60*30
|
<commit_before>
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} # Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
<commit_msg>Reduce result broker message timeout<commit_after>
|
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 60*60} # 60*60 Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
# Clear out finished results after 30 minutes.
CELERY_TASK_RESULT_EXPIRES = 60*30
|
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} # Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
Reduce result broker message timeout
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 60*60} # 60*60 Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
# Clear out finished results after 30 minutes.
CELERY_TASK_RESULT_EXPIRES = 60*30
|
<commit_before>
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600} # Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
<commit_msg>Reduce result broker message timeout<commit_after>
# Wait up to 15 minutes for each iteration.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 60*60} # 60*60 Seconds = 1 hour.
# Do not pre-fetch work.
CELERYD_PREFETCH_MULTIPLIER = 1
# Do not ack messages until work is completed.
CELERY_ACKS_LATE = True
# Stop warning me about PICKLE.
CELERY_ACCEPT_CONTENT = ['pickle']
# Clear out finished results after 30 minutes.
CELERY_TASK_RESULT_EXPIRES = 60*30
|
2ae6974ee04a9c5d39ad18788fe14a432994f6bd
|
zou/event_stream.py
|
zou/event_stream.py
|
import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
app.config["REDIS_URL"] = os.environ.get(
"REDIS_URL",
"redis://localhost/2"
)
app.register_blueprint(sse, url_prefix='/events')
|
import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
redis_host = os.environ.get("KV_HOST", "localhost")
redis_port = os.environ.get("KV_PORT", "5379")
redis_url = "redis://%s:%s/2" % (redis_host, redis_port)
app.config["REDIS_URL"] = redis_url
app.register_blueprint(sse, url_prefix='/events')
|
Use right env variable to build redis url
|
Use right env variable to build redis url
It is for the events stream daemon.
|
Python
|
agpl-3.0
|
cgwire/zou
|
import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
app.config["REDIS_URL"] = os.environ.get(
"REDIS_URL",
"redis://localhost/2"
)
app.register_blueprint(sse, url_prefix='/events')
Use right env variable to build redis url
It is for the events stream daemon.
|
import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
redis_host = os.environ.get("KV_HOST", "localhost")
redis_port = os.environ.get("KV_PORT", "5379")
redis_url = "redis://%s:%s/2" % (redis_host, redis_port)
app.config["REDIS_URL"] = redis_url
app.register_blueprint(sse, url_prefix='/events')
|
<commit_before>import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
app.config["REDIS_URL"] = os.environ.get(
"REDIS_URL",
"redis://localhost/2"
)
app.register_blueprint(sse, url_prefix='/events')
<commit_msg>Use right env variable to build redis url
It is for the events stream daemon.<commit_after>
|
import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
redis_host = os.environ.get("KV_HOST", "localhost")
redis_port = os.environ.get("KV_PORT", "5379")
redis_url = "redis://%s:%s/2" % (redis_host, redis_port)
app.config["REDIS_URL"] = redis_url
app.register_blueprint(sse, url_prefix='/events')
|
import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
app.config["REDIS_URL"] = os.environ.get(
"REDIS_URL",
"redis://localhost/2"
)
app.register_blueprint(sse, url_prefix='/events')
Use right env variable to build redis url
It is for the events stream daemon.import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
redis_host = os.environ.get("KV_HOST", "localhost")
redis_port = os.environ.get("KV_PORT", "5379")
redis_url = "redis://%s:%s/2" % (redis_host, redis_port)
app.config["REDIS_URL"] = redis_url
app.register_blueprint(sse, url_prefix='/events')
|
<commit_before>import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
app.config["REDIS_URL"] = os.environ.get(
"REDIS_URL",
"redis://localhost/2"
)
app.register_blueprint(sse, url_prefix='/events')
<commit_msg>Use right env variable to build redis url
It is for the events stream daemon.<commit_after>import os
from flask import Flask
from flask_sse import sse
app = Flask(__name__)
redis_host = os.environ.get("KV_HOST", "localhost")
redis_port = os.environ.get("KV_PORT", "5379")
redis_url = "redis://%s:%s/2" % (redis_host, redis_port)
app.config["REDIS_URL"] = redis_url
app.register_blueprint(sse, url_prefix='/events')
|
3fe67d3dc52919751217d6e73be436c3e291ab04
|
soho/renderers/__init__.py
|
soho/renderers/__init__.py
|
import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
|
import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
|
Remove empty line. Add bland commit message.
|
Remove empty line. Add bland commit message.
|
Python
|
bsd-3-clause
|
dbaty/soho,dbaty/soho
|
import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
Remove empty line. Add bland commit message.
|
import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
|
<commit_before>import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
<commit_msg>Remove empty line. Add bland commit message.<commit_after>
|
import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
|
import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
Remove empty line. Add bland commit message.import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
|
<commit_before>import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
<commit_msg>Remove empty line. Add bland commit message.<commit_after>import os
from soho.utils import register_plugin
registry = {}
def register_renderer(spec, *ext):
"""Register a renderer.
``spec``
a string that represents the full path to a class, for example
``'soho.renderers.zpt.ZPTRenderer'``. The class must implement
the same interface as :class:`soho.renderers.BaseRenderer`.
``ext``
one or more file extensions to which the plugin will be
associated. At least one file extension must be provided. File
extensions should not contain the dot, for example ``'html'``,
not ``'.html'``.
"""
register_plugin(registry, spec, *ext)
class BaseRenderer(object):
"""The base class that any renderer must implement.
There is only one renderer for now, so the API is subject to
change (as soon as a second renderer is implemented).
"""
def __init__(self, template_path): # pragma: no coverage
raise NotImplementedError
def render(self, **bindings): # pragma: no coverage
"""Render the template with the given ``bindings``."""
raise NotImplementedError
def get_renderer(path, *args, **kwargs):
"""Return a renderer for the given template, or ``None`` if none
could be found.
"""
ext = os.path.splitext(path)[1][1:]
klass = registry.get(ext, None)
if klass is None:
return None
return klass(path, *args, **kwargs)
|
a275611ff2ebea7865a182a398282ab678333a30
|
tests/test_playalbumby/test_query.py
|
tests/test_playalbumby/test_query.py
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'survivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'SurVivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
|
Correct 'ignore case' playalbumby test
|
Correct 'ignore case' playalbumby test
|
Python
|
mit
|
caleb531/play-song,caleb531/play-song
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'survivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
Correct 'ignore case' playalbumby test
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'SurVivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'survivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
<commit_msg>Correct 'ignore case' playalbumby test<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'SurVivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'survivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
Correct 'ignore case' playalbumby test#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'SurVivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'survivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
<commit_msg>Correct 'ignore case' playalbumby test<commit_after>#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import nose.tools as nose
from tests.utils import run_filter
def test_ignore_case():
"""should ignore case when querying albums by an artist"""
results = run_filter('playalbumby', 'SurVivor')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_trim_whitespace():
"""should trim whitespace when querying albums by an artist"""
results = run_filter('playalbumby', ' survivor ')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
def test_partial():
"""should match partial queries when querying albums by an artist"""
results = run_filter('playalbumby', 'urviv')
nose.assert_equal(results[0]['title'], 'Ultimate Survivor')
|
8ffe293135c6ee80f185b6f6a8d6e9f096adc91c
|
knights/k_tags.py
|
knights/k_tags.py
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.Expr(value=ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
))
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.If(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Expr(value=ast.Yield(value=ast.Str(s='')))
|
Use ast.If not ast.IfExp for if tag
|
Use ast.If not ast.IfExp for if tag
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
Use ast.If not ast.IfExp for if tag
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.Expr(value=ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
))
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.If(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Expr(value=ast.Yield(value=ast.Str(s='')))
|
<commit_before>
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
<commit_msg>Use ast.If not ast.IfExp for if tag<commit_after>
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.Expr(value=ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
))
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.If(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Expr(value=ast.Yield(value=ast.Str(s='')))
|
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
Use ast.If not ast.IfExp for if tag
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.Expr(value=ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
))
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.If(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Expr(value=ast.Yield(value=ast.Str(s='')))
|
<commit_before>
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
)
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.IfExp(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Yield(value=ast.Str(s=''))
<commit_msg>Use ast.If not ast.IfExp for if tag<commit_after>
import ast
from .library import Library
register = Library()
@register.tag(name='block')
def block(parser, token):
token = token.strip()
parser.build_method(token, endnodes=['endblock'])
return ast.Expr(value=ast.YieldFrom(
value=ast.Call(
func=ast.Attribute(
value=ast.Name(id='self', ctx=ast.Load()),
attr=token,
ctx=ast.Load()
),
args=[
ast.Name(id='context', ctx=ast.Load()),
],
keywords=[], starargs=None, kwargs=None
)
))
@register.tag(name='if')
def do_if(parser, token):
code = ast.parse(token, mode='eval')
nodelist = list(parser.parse_node(['endif']))
return ast.If(test=code.body, body=nodelist)
@register.tag(name='else')
def do_else(parser, token=None):
return ast.Expr(value=ast.Yield(value=ast.Str(s='')))
|
22112e164005fcbf2c79a5f307d1e587d9146c95
|
store/admin.py
|
store/admin.py
|
from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
|
from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
'featured',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
|
Add featured field to ProductAdmin
|
Add featured field to ProductAdmin
|
Python
|
bsd-3-clause
|
kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop
|
from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
Add featured field to ProductAdmin
|
from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
'featured',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
|
<commit_before>from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
<commit_msg>Add featured field to ProductAdmin<commit_after>
|
from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
'featured',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
|
from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
Add featured field to ProductAdminfrom django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
'featured',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
|
<commit_before>from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
<commit_msg>Add featured field to ProductAdmin<commit_after>from django.contrib import admin
from .models import Image, Product, Review, Specification
class ImageInline(admin.StackedInline):
model = Image
class SpecificationInline(admin.StackedInline):
model = Specification
class ProductAdmin(admin.ModelAdmin):
list_display = (
'name',
'price',
'featured',
)
inlines = [ImageInline, SpecificationInline, ]
search_fields = ['name', 'description',]
class ReviewAdmin(admin.ModelAdmin):
fields = ['product', 'name', 'title', 'rating', 'text']
admin.site.register(Image)
admin.site.register(Product, ProductAdmin)
admin.site.register(Review, ReviewAdmin)
admin.site.register(Specification)
|
f52f1cb890a0a0105915e9406fb0c39332f6c101
|
src/settings.py
|
src/settings.py
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.

"""Main views of the Friday Film Club app."""

__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'

import os

# True when running under the App Engine development server
# (SERVER_SOFTWARE starts with 'Dev' there).
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')

FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT

# Map the serving hostname to a deployment environment; anything
# unrecognised (including a missing HTTP_HOST) counts as 'local'.
_HOST_TO_ENVIRONMENT = {
    'www.fridayfilmclub.com': 'prod',
    'ffcapp.appspot.com': 'prod',
    'dev.ffcapp.appspot.com': 'staging',
}
ENVIRONMENT = _HOST_TO_ENVIRONMENT.get(os.environ.get('HTTP_HOST'), 'local')
|
Fix auth issue, catch up devel --skip-ci
|
Fix auth issue, catch up devel --skip-ci
|
Python
|
mpl-2.0
|
adamjmcgrath/fridayfilmclub,adamjmcgrath/fridayfilmclub,adamjmcgrath/fridayfilmclub,adamjmcgrath/fridayfilmclub
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
Fix auth issue, catch up devel --skip-ci
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'ffcapp.appspot.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
|
<commit_before>#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
<commit_msg>Fix auth issue, catch up devel --skip-ci<commit_after>
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'ffcapp.appspot.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
|
#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
Fix auth issue, catch up devel --skip-ci#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'ffcapp.appspot.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
|
<commit_before>#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
<commit_msg>Fix auth issue, catch up devel --skip-ci<commit_after>#!/usr/bin/python
#
# Copyright 2011 Friday Film Club. All Rights Reserved.
"""Main views of the Friday Film Club app."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
FMJ_EMAIL_SHORT = 'fmj@fridayfilmclub.com'
FMJ_EMAIL = 'Film Master Jack <%s>' % FMJ_EMAIL_SHORT
ENVIRONMENT = ({
'www.fridayfilmclub.com': 'prod',
'ffcapp.appspot.com': 'prod',
'dev.ffcapp.appspot.com': 'staging',
}).get(os.environ.get('HTTP_HOST'), 'local')
|
cf19d5a52237e6098dedc3c0bbfdaa3aedd180e0
|
loginza/models.py
|
loginza/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
while True:
existing_users += 1
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(username, email, password)
self.associate(user)
return user
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json

from .signals import post_associate


class IdentityManager(models.Manager):

    def from_loginza_data(self, loginza_data):
        """Create or refresh an Identity from a Loginza callback payload.

        Looks the record up by the (identity, provider) pair and stores
        the full payload as JSON; on repeat logins the stored payload is
        overwritten with the latest data.
        """
        data = json.dumps(loginza_data)
        identity, created = self.get_or_create(
            identity = loginza_data['identity'],
            provider = loginza_data['provider'],
            defaults = {'data': data}
        )
        if not created:
            # Existing identity: keep the cached payload up to date.
            identity.data = data
            identity.save()
        return identity


class Identity(models.Model):
    # External identity URL/id as reported by Loginza.
    identity = models.CharField(max_length=255)
    # Name of the auth provider (e.g. Google, VKontakte).
    provider = models.CharField(max_length=255)
    # Local user this identity is linked to; null until associated.
    user = models.ForeignKey(User, null=True)
    # Raw JSON payload from the last login via this identity.
    data = models.TextField()

    objects = IdentityManager()

    class Meta:
        unique_together = (('identity', 'provider'),)

    def associate(self, user):
        """Link this identity to *user* and emit the post_associate signal."""
        self.user = user
        self.save()
        post_associate.send(sender=type(self), instance=self)

    def create_user(self, username, email, password=None):
        """Create a User for this identity, de-duplicating the username.

        If *username* is taken, tries username_1, username_2, ... until a
        free name is found, then creates the user and associates it with
        this identity.
        """
        existing_users = 0
        new_username = None
        while True:
            existing_users += 1
            qs = User.objects.all()
            # Test the current candidate: the suffixed name once one has
            # been generated, otherwise the original username.
            qs = qs.filter(username=new_username or username)
            if not qs.exists():
                break
            new_username = '%s_%d' % (username, existing_users)
        user = User.objects.create_user(new_username or username, email, password)
        self.associate(user)
        return user
|
Fix user creation with unique username
|
Fix user creation with unique username
|
Python
|
isc
|
xobb1t/django-loginza-auth
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
while True:
existing_users += 1
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(username, email, password)
self.associate(user)
return user
Fix user creation with unique username
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
new_username = None
while True:
existing_users += 1
qs = User.objects.all()
qs = qs.filter(username=new_username or username)
if not qs.exists():
break
new_username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(new_username or username, email, password)
self.associate(user)
return user
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
while True:
existing_users += 1
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(username, email, password)
self.associate(user)
return user
<commit_msg>Fix user creation with unique username<commit_after>
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
new_username = None
while True:
existing_users += 1
qs = User.objects.all()
qs = qs.filter(username=new_username or username)
if not qs.exists():
break
new_username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(new_username or username, email, password)
self.associate(user)
return user
|
from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
while True:
existing_users += 1
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(username, email, password)
self.associate(user)
return user
Fix user creation with unique usernamefrom django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
new_username = None
while True:
existing_users += 1
qs = User.objects.all()
qs = qs.filter(username=new_username or username)
if not qs.exists():
break
new_username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(new_username or username, email, password)
self.associate(user)
return user
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
while True:
existing_users += 1
try:
User.objects.get(username=username)
except User.DoesNotExist:
break
username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(username, email, password)
self.associate(user)
return user
<commit_msg>Fix user creation with unique username<commit_after>from django.contrib.auth.models import User
from django.db import models
from django.utils import simplejson as json
from .signals import post_associate
class IdentityManager(models.Manager):
def from_loginza_data(self, loginza_data):
data = json.dumps(loginza_data)
identity, created = self.get_or_create(
identity = loginza_data['identity'],
provider = loginza_data['provider'],
defaults = {'data': data}
)
if not created:
identity.data = data
identity.save()
return identity
class Identity(models.Model):
identity = models.CharField(max_length=255)
provider = models.CharField(max_length=255)
user = models.ForeignKey(User, null=True)
data = models.TextField()
objects = IdentityManager()
class Meta:
unique_together = (('identity', 'provider'),)
def associate(self, user):
self.user = user
self.save()
post_associate.send(sender=type(self), instance=self)
def create_user(self, username, email, password=None):
existing_users = 0
new_username = None
while True:
existing_users += 1
qs = User.objects.all()
qs = qs.filter(username=new_username or username)
if not qs.exists():
break
new_username = '%s_%d' % (username, existing_users)
user = User.objects.create_user(new_username or username, email, password)
self.associate(user)
return user
|
7d2277685a125e4ee2b57ed7782bcae62f64464b
|
matrix/example.py
|
matrix/example.py
|
class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return map(list, zip(*self.rows))
|
class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return [list(tup) for tup in zip(*self.rows)]
|
Make matrix exercise compatible with Python3
|
Make matrix exercise compatible with Python3
|
Python
|
mit
|
exercism/python,pombredanne/xpython,outkaj/xpython,behrtam/xpython,smalley/python,orozcoadrian/xpython,orozcoadrian/xpython,exercism/xpython,ZacharyRSmith/xpython,de2Zotjes/xpython,oalbe/xpython,jmluy/xpython,N-Parsons/exercism-python,pheanex/xpython,behrtam/xpython,exercism/python,pombredanne/xpython,wobh/xpython,jmluy/xpython,smalley/python,Peque/xpython,Peque/xpython,rootulp/xpython,exercism/xpython,rootulp/xpython,de2Zotjes/xpython,wobh/xpython,mweb/python,pheanex/xpython,N-Parsons/exercism-python,oalbe/xpython,ZacharyRSmith/xpython,mweb/python,outkaj/xpython
|
class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return map(list, zip(*self.rows))
Make matrix exercise compatible with Python3
|
class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return [list(tup) for tup in zip(*self.rows)]
|
<commit_before>class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return map(list, zip(*self.rows))
<commit_msg>Make matrix exercise compatible with Python3<commit_after>
|
class Matrix(object):
    """A matrix of ints parsed from a newline/space-delimited string."""

    def __init__(self, s):
        # One inner list per input line, one int per whitespace-separated token.
        self.rows = []
        for line in s.split('\n'):
            self.rows.append([int(token) for token in line.split()])

    @property
    def columns(self):
        """Return the transpose of ``rows`` as a list of lists."""
        return [list(column) for column in zip(*self.rows)]
|
class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return map(list, zip(*self.rows))
Make matrix exercise compatible with Python3class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return [list(tup) for tup in zip(*self.rows)]
|
<commit_before>class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return map(list, zip(*self.rows))
<commit_msg>Make matrix exercise compatible with Python3<commit_after>class Matrix(object):
def __init__(self, s):
self.rows = [[int(n) for n in row.split()]
for row in s.split('\n')]
@property
def columns(self):
return [list(tup) for tup in zip(*self.rows)]
|
bab2c322a9861e9869e92a3952a0d19f1559b099
|
redis_commands/parse.py
|
redis_commands/parse.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "Usage: %s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s<br>%s" % (sibling.text, usage)
data[command_url] = (row.text, summary)
for command_url in data.keys():
command, summary = data[command_url]
summary = unicode(summary).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
"", # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Scrape the redis.io command index (previously downloaded to
# download/raw.dat) into the tab-separated fathead output format.
# NOTE(review): Python 2 script — uses the `unicode` builtin.

import lxml.etree, lxml.html
import re

url = "http://redis.io"
output = "output.txt"

f = open(output, "w");

tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")

data = {}
for command in commands:
    for row in command.findall('a'):
        # Absolute URL for this command's documentation page.
        command_url = "%s%s" % (url, row.get('href'))
        # The sibling element carries the command's summary text.
        for sibling in command.itersiblings():
            usage = ""
            for command_args in command.findall('span'):
                # Build a synopsis: command name plus its collapsed
                # argument spec (whitespace squeezed out of the span text).
                usage = "%s %s" % (row.text, command_args.text.replace('  ', '').replace('\n', ' ').strip())
            summary = "%s" % (sibling.text)
            data[command_url] = (row.text, summary, usage)

for command_url in data.keys():
    command, summary, usage = data[command_url]
    # Encode as UTF-8 bytes for the output file (Python 2 unicode handling).
    summary = unicode(summary).encode("utf-8")
    usage = unicode(usage).encode("utf-8")
    # One tab-separated record per command, in the fathead column order.
    f.write("\t".join([str(command), # title
                "", # namespace
                command_url, # url
                summary, # description
                usage, # synopsis
                "", # details
                "", # type
                "" # lang
               ])
           )
    f.write("\n")
f.close()
|
Use a synopsis for usage examples
|
redis_commands: Use a synopsis for usage examples
|
Python
|
apache-2.0
|
nikhilsingh291/zeroclickinfo-fathead,thinker3197/zeroclickinfo-fathead,p12tic/zeroclickinfo-fathead,p12tic/zeroclickinfo-fathead,thinker3197/zeroclickinfo-fathead,samskeller/zeroclickinfo-fathead,nikhilsingh291/zeroclickinfo-fathead,dankolbrs/zeroclickinfo-fathead,rasikapohankar/zeroclickinfo-fathead,p12tic/zeroclickinfo-fathead,samskeller/zeroclickinfo-fathead,samskeller/zeroclickinfo-fathead,rasikapohankar/zeroclickinfo-fathead,souravbadami/zeroclickinfo-fathead,thinker3197/zeroclickinfo-fathead,rasikapohankar/zeroclickinfo-fathead,dankolbrs/zeroclickinfo-fathead,souravbadami/zeroclickinfo-fathead,p12tic/zeroclickinfo-fathead,souravbadami/zeroclickinfo-fathead,rasikapohankar/zeroclickinfo-fathead,p12tic/zeroclickinfo-fathead,samskeller/zeroclickinfo-fathead,rasikapohankar/zeroclickinfo-fathead,dankolbrs/zeroclickinfo-fathead,rasikapohankar/zeroclickinfo-fathead,dankolbrs/zeroclickinfo-fathead,samskeller/zeroclickinfo-fathead,nikhilsingh291/zeroclickinfo-fathead,p12tic/zeroclickinfo-fathead,souravbadami/zeroclickinfo-fathead,thinker3197/zeroclickinfo-fathead,nikhilsingh291/zeroclickinfo-fathead,thinker3197/zeroclickinfo-fathead,souravbadami/zeroclickinfo-fathead,samskeller/zeroclickinfo-fathead,nikhilsingh291/zeroclickinfo-fathead,thinker3197/zeroclickinfo-fathead,nikhilsingh291/zeroclickinfo-fathead,dankolbrs/zeroclickinfo-fathead,dankolbrs/zeroclickinfo-fathead,souravbadami/zeroclickinfo-fathead
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "Usage: %s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s<br>%s" % (sibling.text, usage)
data[command_url] = (row.text, summary)
for command_url in data.keys():
command, summary = data[command_url]
summary = unicode(summary).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
"", # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
redis_commands: Use a synopsis for usage examples
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "%s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s" % (sibling.text)
data[command_url] = (row.text, summary, usage)
for command_url in data.keys():
command, summary, usage = data[command_url]
summary = unicode(summary).encode("utf-8")
usage = unicode(usage).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
usage, # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "Usage: %s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s<br>%s" % (sibling.text, usage)
data[command_url] = (row.text, summary)
for command_url in data.keys():
command, summary = data[command_url]
summary = unicode(summary).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
"", # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
<commit_msg>redis_commands: Use a synopsis for usage examples<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "%s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s" % (sibling.text)
data[command_url] = (row.text, summary, usage)
for command_url in data.keys():
command, summary, usage = data[command_url]
summary = unicode(summary).encode("utf-8")
usage = unicode(usage).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
usage, # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "Usage: %s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s<br>%s" % (sibling.text, usage)
data[command_url] = (row.text, summary)
for command_url in data.keys():
command, summary = data[command_url]
summary = unicode(summary).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
"", # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
redis_commands: Use a synopsis for usage examples#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "%s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s" % (sibling.text)
data[command_url] = (row.text, summary, usage)
for command_url in data.keys():
command, summary, usage = data[command_url]
summary = unicode(summary).encode("utf-8")
usage = unicode(usage).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
usage, # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "Usage: %s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s<br>%s" % (sibling.text, usage)
data[command_url] = (row.text, summary)
for command_url in data.keys():
command, summary = data[command_url]
summary = unicode(summary).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
"", # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
<commit_msg>redis_commands: Use a synopsis for usage examples<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import lxml.etree, lxml.html
import re
url = "http://redis.io"
output = "output.txt"
f = open(output, "w");
tree = lxml.html.parse("download/raw.dat").getroot()
commands = tree.find_class("command")
data = {}
for command in commands:
for row in command.findall('a'):
command_url = "%s%s" % (url, row.get('href'))
for sibling in command.itersiblings():
usage = ""
for command_args in command.findall('span'):
usage = "%s %s" % (row.text, command_args.text.replace(' ', '').replace('\n', ' ').strip())
summary = "%s" % (sibling.text)
data[command_url] = (row.text, summary, usage)
for command_url in data.keys():
command, summary, usage = data[command_url]
summary = unicode(summary).encode("utf-8")
usage = unicode(usage).encode("utf-8")
f.write("\t".join([str(command), # title
"", # namespace
command_url, # url
summary, # description
usage, # synopsis
"", # details
"", # type
"" # lang
])
)
f.write("\n")
f.close()
|
db0aa94de30d73217f9091635c92f59b8af98ef7
|
alg_sum_list.py
|
alg_sum_list.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_for(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = [0, 1, 2, 3, 4, 5]
start_time = time.time()
print('By for loop: {}'.format(sum_list_for(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
Rename to sum_list_iter() and revise main()'s num_ls
|
Rename to sum_list_iter() and revise main()'s num_ls
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_for(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = [0, 1, 2, 3, 4, 5]
start_time = time.time()
print('By for loop: {}'.format(sum_list_for(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
Rename to sum_list_iter() and revise main()'s num_ls
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_for(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = [0, 1, 2, 3, 4, 5]
start_time = time.time()
print('By for loop: {}'.format(sum_list_for(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
<commit_msg>Rename to sum_list_iter() and revise main()'s num_ls<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_for(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = [0, 1, 2, 3, 4, 5]
start_time = time.time()
print('By for loop: {}'.format(sum_list_for(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
Rename to sum_list_iter() and revise main()'s num_lsfrom __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_for(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = [0, 1, 2, 3, 4, 5]
start_time = time.time()
print('By for loop: {}'.format(sum_list_for(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
<commit_msg>Rename to sum_list_iter() and revise main()'s num_ls<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def sum_list_iter(num_ls):
"""Sum number list by for loop."""
_sum = 0
for num in num_ls:
_sum += num
return _sum
def sum_list_recur(num_ls):
"""Sum number list by recursion."""
if len(num_ls) == 1:
return num_ls[0]
else:
return num_ls[0] + sum_list_recur(num_ls[1:])
def main():
import time
num_ls = range(100)
start_time = time.time()
print('By iteration: {}'.format(sum_list_iter(num_ls)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By recursion: {}'.format(sum_list_recur(num_ls)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
|
03a803bb87478d79f67b20275bc45b56e7c8300f
|
tests/similarity/test_new_similarity.py
|
tests/similarity/test_new_similarity.py
|
import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main()
|
import unittest
from similarity.nw_similarity import NWAlgorithm
class TestNewSimilarity(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
print 'Starting new similarity tests'
unittest.main()
|
Fix incorrect import reference to nw_similarity
|
Fix incorrect import reference to nw_similarity
|
Python
|
mit
|
dpazel/tryinggithub
|
import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main()Fix incorrect import reference to nw_similarity
|
import unittest
from similarity.nw_similarity import NWAlgorithm
class TestNewSimilarity(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
print 'Starting new similarity tests'
unittest.main()
|
<commit_before>import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main()<commit_msg>Fix incorrect import reference to nw_similarity<commit_after>
|
import unittest
from similarity.nw_similarity import NWAlgorithm
class TestNewSimilarity(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
print 'Starting new similarity tests'
unittest.main()
|
import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main()Fix incorrect import reference to nw_similarityimport unittest
from similarity.nw_similarity import NWAlgorithm
class TestNewSimilarity(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
print 'Starting new similarity tests'
unittest.main()
|
<commit_before>import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main()<commit_msg>Fix incorrect import reference to nw_similarity<commit_after>import unittest
from similarity.nw_similarity import NWAlgorithm
class TestNewSimilarity(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
print 'Starting new similarity tests'
unittest.main()
|
d2367579b9c17bcb81a78108c0eda960346a79e1
|
src/reduce_framerate.py
|
src/reduce_framerate.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.image_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.frame_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
|
Fix mistaken reference to image_callback.
|
Fix mistaken reference to image_callback.
|
Python
|
mit
|
masasin/spirit,masasin/spirit
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.image_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
Fix mistaken reference to image_callback.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.frame_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.image_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
<commit_msg>Fix mistaken reference to image_callback.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.frame_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.image_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
Fix mistaken reference to image_callback.#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.frame_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.image_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
<commit_msg>Fix mistaken reference to image_callback.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 2015 Jean Nassar
# Released under BSD version 4
"""
Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz.
"""
import rospy
from sensor_msgs.msg import Image
class ImageFeature(object):
"""
A ROS image Publisher/Subscriber.
"""
def __init__(self):
self.image_subscriber = rospy.Subscriber("/ardrone/image_raw",
Image, self.frame_callback,
queue_size=1)
self.image_publisher = rospy.Publisher("/output/slow_image_raw",
Image, queue_size=1)
rospy.logdebug("Subscribed to /ardrone/image_raw")
self.count = 0
def frame_callback(self, frame):
"""
Callback function of subscribed topic.
"""
# Publish every fifteenth frame
if not self.count % 15:
self.image_publisher.publish(frame)
self.count += 1
def main():
"""Initialize ROS node."""
rospy.init_node("framerate_reducer", anonymous=True)
ImageFeature()
rospy.loginfo("Reducing framerate")
rospy.spin()
if __name__ == "__main__":
main()
|
5a7c70a4e62598aad3c4d34af8d4ee45d3e51bc1
|
fabfile.py
|
fabfile.py
|
"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns
__all__ = ['dns']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
|
"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns, trac
__all__ = ['dns', 'trac', 'make_service_admin']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
|
Make trac related tasks available in fab
|
Make trac related tasks available in fab
|
Python
|
mit
|
alex/braid,alex/braid
|
"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns
__all__ = ['dns']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
Make trac related tasks available in fab
|
"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns, trac
__all__ = ['dns', 'trac', 'make_service_admin']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
|
<commit_before>"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns
__all__ = ['dns']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
<commit_msg>Make trac related tasks available in fab<commit_after>
|
"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns, trac
__all__ = ['dns', 'trac', 'make_service_admin']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
|
"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns
__all__ = ['dns']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
Make trac related tasks available in fab"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns, trac
__all__ = ['dns', 'trac', 'make_service_admin']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
|
<commit_before>"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns
__all__ = ['dns']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
<commit_msg>Make trac related tasks available in fab<commit_after>"""
Collection of utilities to automate the administration of Twisted's
infrastructure. Use this utility to install, update and start/stop/restart
services running on twistedmatrix.com.
"""
"""
This file is a simple entry point, nothing is final about it!
Just experimenting for now.
"""
from fabric.api import task, sudo
from fablib import package, service, load_config, requires_root
from services import dns, trac
__all__ = ['dns', 'trac', 'make_service_admin']
load_config('config')
# TODO: Add hooks to check if updated to upstream before running any command
@task
@requires_root
def install_exim():
package.install('exim4')
service.enable('exim4')
@task
@requires_root
def make_service_admin(username):
"""
Simply add the given user to the 'service-admin' group. This allows the
user to execute any command as any service-specific user through sudo.
"""
sudo('usermod -a -G service-admin {}'.format(username))
|
edb1753fe1897de2a39a186c09fa2de390a63d65
|
taggit/migrations/__init__.py
|
taggit/migrations/__init__.py
|
"""
Django migrations for taggit app
This package does not contain South migrations. South migrations can be found
in the ``south_migrations`` package.
"""
SOUTH_ERROR_MESSAGE = """\n
For South support, customize the SOUTH_MIGRATION_MODULES setting like so:
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
"""
# Ensure the user is not using Django 1.6 or below with South
try:
from django.db import migrations # noqa
except ImportError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE)
|
Raise error if taggit.migrations imported for Django <=1.6
|
Raise error if taggit.migrations imported for Django <=1.6
Fixes gh-219.
|
Python
|
bsd-3-clause
|
adrian-sgn/django-taggit,IRI-Research/django-taggit,doselect/django-taggit,benjaminrigaud/django-taggit,nealtodd/django-taggit,laanlabs/django-taggit,eugena/django-taggit,7kfpun/django-taggit,tamarmot/django-taggit,izquierdo/django-taggit,cimani/django-taggit,Eksmo/django-taggit,kaedroho/django-taggit,orbitvu/django-taggit,kminkov/django-taggit,Maplecroft/django-taggit,gem/django-taggit,vhf/django-taggit
|
Raise error if taggit.migrations imported for Django <=1.6
Fixes gh-219.
|
"""
Django migrations for taggit app
This package does not contain South migrations. South migrations can be found
in the ``south_migrations`` package.
"""
SOUTH_ERROR_MESSAGE = """\n
For South support, customize the SOUTH_MIGRATION_MODULES setting like so:
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
"""
# Ensure the user is not using Django 1.6 or below with South
try:
from django.db import migrations # noqa
except ImportError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE)
|
<commit_before><commit_msg>Raise error if taggit.migrations imported for Django <=1.6
Fixes gh-219.<commit_after>
|
"""
Django migrations for taggit app
This package does not contain South migrations. South migrations can be found
in the ``south_migrations`` package.
"""
SOUTH_ERROR_MESSAGE = """\n
For South support, customize the SOUTH_MIGRATION_MODULES setting like so:
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
"""
# Ensure the user is not using Django 1.6 or below with South
try:
from django.db import migrations # noqa
except ImportError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE)
|
Raise error if taggit.migrations imported for Django <=1.6
Fixes gh-219."""
Django migrations for taggit app
This package does not contain South migrations. South migrations can be found
in the ``south_migrations`` package.
"""
SOUTH_ERROR_MESSAGE = """\n
For South support, customize the SOUTH_MIGRATION_MODULES setting like so:
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
"""
# Ensure the user is not using Django 1.6 or below with South
try:
from django.db import migrations # noqa
except ImportError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE)
|
<commit_before><commit_msg>Raise error if taggit.migrations imported for Django <=1.6
Fixes gh-219.<commit_after>"""
Django migrations for taggit app
This package does not contain South migrations. South migrations can be found
in the ``south_migrations`` package.
"""
SOUTH_ERROR_MESSAGE = """\n
For South support, customize the SOUTH_MIGRATION_MODULES setting like so:
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
"""
# Ensure the user is not using Django 1.6 or below with South
try:
from django.db import migrations # noqa
except ImportError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured(SOUTH_ERROR_MESSAGE)
|
|
47eecbb86e78d79666c6ae44efa47915f8807621
|
doctor/utils.py
|
doctor/utils.py
|
from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = {}
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
|
from django.utils.datastructures import SortedDict
from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = SortedDict()
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
|
Use sorted dictionary in cleanse_dictionary.
|
Use sorted dictionary in cleanse_dictionary.
|
Python
|
bsd-3-clause
|
novapost/django-doctor,python-hospital/django-doctor,funkbit/django-doctor
|
from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = {}
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
Use sorted dictionary in cleanse_dictionary.
|
from django.utils.datastructures import SortedDict
from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = SortedDict()
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
|
<commit_before>from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = {}
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
<commit_msg>Use sorted dictionary in cleanse_dictionary.<commit_after>
|
from django.utils.datastructures import SortedDict
from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = SortedDict()
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
|
from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = {}
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
Use sorted dictionary in cleanse_dictionary.from django.utils.datastructures import SortedDict
from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = SortedDict()
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
|
<commit_before>from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = {}
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
<commit_msg>Use sorted dictionary in cleanse_dictionary.<commit_after>from django.utils.datastructures import SortedDict
from django.views.debug import cleanse_setting
def cleanse_dictionary(dictionary):
"""
Cleanse sensitive values in a dictionary.
"""
cleansed_dictionary = SortedDict()
for key, val in dictionary.iteritems():
cleansed_dictionary[key] = cleanse_setting(key, val)
return cleansed_dictionary
|
4c3786b0b1ad0969bad2865bb11b81be396b5f8d
|
CMake/vtkTestDriver.py
|
CMake/vtkTestDriver.py
|
r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
|
r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
import time
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print >> sys.stderr, "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# sleep to ensure that the process starts.
time.sleep(0.1)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print >> sys.stderr, "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
|
Add a delay between process launches.
|
Add a delay between process launches.
It so happens that the client process was launched before the server causing
deadlocks in some cases. Fixed that by adding a small delay. In reality, we may
need to logic to vtkTestDriver to make it more featured and parse outputs from
processes to handle this correctly. We can do that in next stage.
Change-Id: Ief6989c50e816bc8b6b7f8a780fd31b578079f4c
|
Python
|
bsd-3-clause
|
candy7393/VTK,sumedhasingla/VTK,mspark93/VTK,jmerkow/VTK,sankhesh/VTK,SimVascular/VTK,sankhesh/VTK,keithroe/vtkoptix,demarle/VTK,jmerkow/VTK,demarle/VTK,mspark93/VTK,jmerkow/VTK,candy7393/VTK,berendkleinhaneveld/VTK,mspark93/VTK,demarle/VTK,mspark93/VTK,sankhesh/VTK,hendradarwin/VTK,keithroe/vtkoptix,msmolens/VTK,johnkit/vtk-dev,sankhesh/VTK,johnkit/vtk-dev,ashray/VTK-EVM,keithroe/vtkoptix,sankhesh/VTK,candy7393/VTK,keithroe/vtkoptix,sumedhasingla/VTK,sumedhasingla/VTK,gram526/VTK,jmerkow/VTK,johnkit/vtk-dev,johnkit/vtk-dev,candy7393/VTK,ashray/VTK-EVM,candy7393/VTK,gram526/VTK,demarle/VTK,jmerkow/VTK,demarle/VTK,berendkleinhaneveld/VTK,berendkleinhaneveld/VTK,msmolens/VTK,SimVascular/VTK,hendradarwin/VTK,keithroe/vtkoptix,msmolens/VTK,SimVascular/VTK,gram526/VTK,keithroe/vtkoptix,ashray/VTK-EVM,jmerkow/VTK,ashray/VTK-EVM,msmolens/VTK,gram526/VTK,sankhesh/VTK,sumedhasingla/VTK,gram526/VTK,demarle/VTK,SimVascular/VTK,msmolens/VTK,hendradarwin/VTK,keithroe/vtkoptix,sankhesh/VTK,hendradarwin/VTK,gram526/VTK,SimVascular/VTK,berendkleinhaneveld/VTK,ashray/VTK-EVM,johnkit/vtk-dev,berendkleinhaneveld/VTK,SimVascular/VTK,hendradarwin/VTK,msmolens/VTK,hendradarwin/VTK,berendkleinhaneveld/VTK,candy7393/VTK,gram526/VTK,SimVascular/VTK,msmolens/VTK,gram526/VTK,mspark93/VTK,johnkit/vtk-dev,ashray/VTK-EVM,ashray/VTK-EVM,candy7393/VTK,sumedhasingla/VTK,johnkit/vtk-dev,msmolens/VTK,hendradarwin/VTK,ashray/VTK-EVM,sankhesh/VTK,mspark93/VTK,sumedhasingla/VTK,sumedhasingla/VTK,mspark93/VTK,jmerkow/VTK,candy7393/VTK,berendkleinhaneveld/VTK,jmerkow/VTK,mspark93/VTK,demarle/VTK,SimVascular/VTK,keithroe/vtkoptix,demarle/VTK,sumedhasingla/VTK
|
r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
Add a delay between process launches.
It so happens that the client process was launched before the server causing
deadlocks in some cases. Fixed that by adding a small delay. In reality, we may
need to logic to vtkTestDriver to make it more featured and parse outputs from
processes to handle this correctly. We can do that in next stage.
Change-Id: Ief6989c50e816bc8b6b7f8a780fd31b578079f4c
|
r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
import time
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print >> sys.stderr, "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# sleep to ensure that the process starts.
time.sleep(0.1)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print >> sys.stderr, "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
|
<commit_before>r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
<commit_msg>Add a delay between process launches.
It so happens that the client process was launched before the server causing
deadlocks in some cases. Fixed that by adding a small delay. In reality, we may
need to logic to vtkTestDriver to make it more featured and parse outputs from
processes to handle this correctly. We can do that in next stage.
Change-Id: Ief6989c50e816bc8b6b7f8a780fd31b578079f4c<commit_after>
|
r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
import time
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print >> sys.stderr, "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# sleep to ensure that the process starts.
time.sleep(0.1)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print >> sys.stderr, "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
|
r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
Add a delay between process launches.
It so happens that the client process was launched before the server causing
deadlocks in some cases. Fixed that by adding a small delay. In reality, we may
need to logic to vtkTestDriver to make it more featured and parse outputs from
processes to handle this correctly. We can do that in next stage.
Change-Id: Ief6989c50e816bc8b6b7f8a780fd31b578079f4cr"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
import time
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print >> sys.stderr, "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# sleep to ensure that the process starts.
time.sleep(0.1)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print >> sys.stderr, "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
|
<commit_before>r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
<commit_msg>Add a delay between process launches.
It so happens that the client process was launched before the server causing
deadlocks in some cases. Fixed that by adding a small delay. In reality, we may
need to logic to vtkTestDriver to make it more featured and parse outputs from
processes to handle this correctly. We can do that in next stage.
Change-Id: Ief6989c50e816bc8b6b7f8a780fd31b578079f4c<commit_after>r"""
This is a script that can be used to run tests that require multiple
executables to be run e.g. those client-server tests.
Usage:
python vtkTestDriver.py --process exe1 arg11 arg12 ...
--process exe2 arg21 arg22 ...
--process ...
"""
import sys
import subprocess
import time
# Extract arguments for each process to execute.
command_lists = []
prev = None
for idx in range(1, len(sys.argv)):
if sys.argv[idx] == "--process":
if prev:
command_lists.append(sys.argv[prev:idx])
prev = idx+1
if prev <= len(sys.argv):
command_lists.append(sys.argv[prev:])
procs = []
for cmdlist in command_lists:
print >> sys.stderr, "Executing '", " ".join(cmdlist), "'"
proc = subprocess.Popen(cmdlist)
procs.append(proc)
# sleep to ensure that the process starts.
time.sleep(0.1)
# Now wait for each of the processes to terminate.
# If ctest times out, it will kill this process and all subprocesses will be
# terminated anyways, so we don't need to handle timeout specially.
for proc in procs:
proc.wait()
for proc in procs:
if proc.returncode != 0:
print >> sys.stderr, "ERROR: A process exited with error. Test will fail."
sys.exit(1) # error
print "All's well!"
|
18b8c8cafcfc61e0b6bb170438a405a7c0406da0
|
mkdocs/gh_deploy.py
|
mkdocs/gh_deploy.py
|
import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
host, path = url.split('github.com/', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
print 'Your documentation should shortly be available at: http://%s.github.io/%s' % (username, repo)
|
import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
# TODO: Also check for CNAME file
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
if 'github.com/' in url:
host, path = url.split('github.com/', 1)
else:
host, path = url.split('github.com:', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
url = 'http://%s.github.io/%s' % (username, repo)
print 'Your documentation should shortly be available at: ' + url
|
Support both remote URL styles in gh-deploy.
|
Support both remote URL styles in gh-deploy.
|
Python
|
bsd-2-clause
|
jpush/mkdocs,davidgillies/mkdocs,kubikusrubikus/mkdocs,mlzummo/mkdocs,d0ugal/mkdocs,mkdocs/mkdocs,ramramps/mkdocs,peter1000/mkdocs,williamjmorenor/mkdocs,vi4m/mkdocs,longjl/mkdocs,nicoddemus/mkdocs,xeechou/mkblogs,fujita-shintaro/mkdocs,jamesbeebop/mkdocs,lbenet/mkdocs,justinkinney/mkdocs,michaelmcandrew/mkdocs,xeechou/mkblogs,nicoddemus/mkdocs,jeoygin/mkdocs,mlzummo/mkdocs,ramramps/mkdocs,jimporter/mkdocs,justinkinney/mkdocs,gregelin/mkdocs,davidgillies/mkdocs,rickpeters/mkdocs,mkdocs/mkdocs,hhg2288/mkdocs,jeoygin/mkdocs,tedmiston/mkdocs,waylan/mkdocs,samhatfield/mkdocs,davidgillies/mkdocs,vi4m/mkdocs,mlzummo/mkdocs,rickpeters/mkdocs,fujita-shintaro/mkdocs,nicoddemus/mkdocs,longjl/mkdocs,dmehra/mkdocs,hhg2288/mkdocs,williamjmorenor/mkdocs,samhatfield/mkdocs,justinkinney/mkdocs,ericholscher/mkdocs,ramramps/mkdocs,jpush/mkdocs,lbenet/mkdocs,cnbin/mkdocs,gregelin/mkdocs,williamjmorenor/mkdocs,wenqiuhua/mkdocs,peter1000/mkdocs,cazzerson/mkdocs,jeoygin/mkdocs,d0ugal/mkdocs,lukfor/mkdocs,tedmiston/mkdocs,wenqiuhua/mkdocs,cazzerson/mkdocs,hhg2288/mkdocs,lbenet/mkdocs,vi4m/mkdocs,michaelmcandrew/mkdocs,d0ugal/mkdocs,longjl/mkdocs,simonfork/mkdocs,pjbull/mkdocs,jamesbeebop/mkdocs,peter1000/mkdocs,pjbull/mkdocs,jpush/mkdocs,ericholscher/mkdocs,jimporter/mkdocs,xeechou/mkblogs,fujita-shintaro/mkdocs,wenqiuhua/mkdocs,cazzerson/mkdocs,kubikusrubikus/mkdocs,kubikusrubikus/mkdocs,dmehra/mkdocs,lukfor/mkdocs,cnbin/mkdocs,waylan/mkdocs,wenqiuhua/mkdocs,jamesbeebop/mkdocs,mkdocs/mkdocs,cazzerson/mkdocs,michaelmcandrew/mkdocs,tedmiston/mkdocs,rickpeters/mkdocs,samuelcolvin/mkdocs,simonfork/mkdocs,pjbull/mkdocs,xeechou/mkblogs,simonfork/mkdocs,jimporter/mkdocs,waylan/mkdocs,jpush/mkdocs,samuelcolvin/mkdocs,gregelin/mkdocs,lukfor/mkdocs,samuelcolvin/mkdocs,cnbin/mkdocs,dmehra/mkdocs,ericholscher/mkdocs,samhatfield/mkdocs
|
import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
host, path = url.split('github.com/', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
print 'Your documentation should shortly be available at: http://%s.github.io/%s' % (username, repo)
Support both remote URL styles in gh-deploy.
|
import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
# TODO: Also check for CNAME file
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
if 'github.com/' in url:
host, path = url.split('github.com/', 1)
else:
host, path = url.split('github.com:', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
url = 'http://%s.github.io/%s' % (username, repo)
print 'Your documentation should shortly be available at: ' + url
|
<commit_before>import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
host, path = url.split('github.com/', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
print 'Your documentation should shortly be available at: http://%s.github.io/%s' % (username, repo)
<commit_msg>Support both remote URL styles in gh-deploy.<commit_after>
|
import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
# TODO: Also check for CNAME file
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
if 'github.com/' in url:
host, path = url.split('github.com/', 1)
else:
host, path = url.split('github.com:', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
url = 'http://%s.github.io/%s' % (username, repo)
print 'Your documentation should shortly be available at: ' + url
|
import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
host, path = url.split('github.com/', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
print 'Your documentation should shortly be available at: http://%s.github.io/%s' % (username, repo)
Support both remote URL styles in gh-deploy.import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
# TODO: Also check for CNAME file
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
if 'github.com/' in url:
host, path = url.split('github.com/', 1)
else:
host, path = url.split('github.com:', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
url = 'http://%s.github.io/%s' % (username, repo)
print 'Your documentation should shortly be available at: ' + url
|
<commit_before>import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
host, path = url.split('github.com/', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
print 'Your documentation should shortly be available at: http://%s.github.io/%s' % (username, repo)
<commit_msg>Support both remote URL styles in gh-deploy.<commit_after>import subprocess
import os
def gh_deploy(config):
if not os.path.exists('.git'):
print 'Cannot deploy - this directory does not appear to be a git repository'
return
print "Copying '%s' to `gh-pages` branch and pushing to GitHub." % config['site_dir']
try:
subprocess.check_call(['ghp-import', '-p', config['site_dir']])
except:
return
# TODO: Also check for CNAME file
url = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
url = url.decode('utf-8').strip()
if 'github.com/' in url:
host, path = url.split('github.com/', 1)
else:
host, path = url.split('github.com:', 1)
username, repo = path.split('/', 1)
if repo.endswith('.git'):
repo = repo[:-len('.git')]
url = 'http://%s.github.io/%s' % (username, repo)
print 'Your documentation should shortly be available at: ' + url
|
6358f3fb8a3ece53adeb71f9b59f96a5a3a9ca70
|
examples/system/ulp_adc/example_test.py
|
examples/system/ulp_adc/example_test.py
|
from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
|
from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0]
assert measurements_str is not None
measurements = int(measurements_str)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0]
assert value_str is not None
value = int(value_str)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
|
Fix regex in ulp_adc example test
|
CI: Fix regex in ulp_adc example test
|
Python
|
apache-2.0
|
espressif/esp-idf,espressif/esp-idf,espressif/esp-idf,espressif/esp-idf
|
from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
CI: Fix regex in ulp_adc example test
|
from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0]
assert measurements_str is not None
measurements = int(measurements_str)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0]
assert value_str is not None
value = int(value_str)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
|
<commit_before>from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
<commit_msg>CI: Fix regex in ulp_adc example test<commit_after>
|
from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0]
assert measurements_str is not None
measurements = int(measurements_str)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0]
assert value_str is not None
value = int(value_str)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
|
from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
CI: Fix regex in ulp_adc example testfrom __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0]
assert measurements_str is not None
measurements = int(measurements_str)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0]
assert value_str is not None
value = int(value_str)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
|
<commit_before>from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements = int(dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0], 10)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value = int(dut.expect(re.compile(r'Value=(\d+) was (?:below)|(?:above) threshold'), timeout=5)[0], 10)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
<commit_msg>CI: Fix regex in ulp_adc example test<commit_after>from __future__ import unicode_literals
from tiny_test_fw import Utility
import re
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_ulp_adc(env, extra_data):
dut = env.get_dut('ulp_adc', 'examples/system/ulp_adc')
dut.start_app()
dut.expect_all('Not ULP wakeup',
'Entering deep sleep',
timeout=30)
for _ in range(5):
dut.expect('Deep sleep wakeup', timeout=60)
measurements_str = dut.expect(re.compile(r'ULP did (\d+) measurements'), timeout=5)[0]
assert measurements_str is not None
measurements = int(measurements_str)
Utility.console_log('ULP did {} measurements'.format(measurements))
dut.expect('Thresholds: low=1500 high=2000', timeout=5)
value_str = dut.expect(re.compile(r'Value=(\d+) was (above|below) threshold'), timeout=5)[0]
assert value_str is not None
value = int(value_str)
Utility.console_log('Value {} was outside the boundaries'.format(value))
dut.expect('Entering deep sleep', timeout=60)
if __name__ == '__main__':
test_examples_ulp_adc()
|
f2d2706c7943108c26357bcd9b6dfd92229ae7ce
|
blo/__init__.py
|
blo/__init__.py
|
from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
|
import configparser
import optparse
from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
parser = optparse.OptionParser("usage: %prog [option] markdown_file.md")
parser.add_option("-c", "--config", dest="config_file",
default="./blo.cfg", type="string", help="specify configuration file path to run on")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
cfg_file = options.config_file
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
|
Implement command line option parse program.
|
Implement command line option parse program.
|
Python
|
mit
|
10nin/blo,10nin/blo
|
from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
Implement command line option parse program.
|
import configparser
import optparse
from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
parser = optparse.OptionParser("usage: %prog [option] markdown_file.md")
parser.add_option("-c", "--config", dest="config_file",
default="./blo.cfg", type="string", help="specify configuration file path to run on")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
cfg_file = options.config_file
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
|
<commit_before>from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
<commit_msg>Implement command line option parse program.<commit_after>
|
import configparser
import optparse
from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
parser = optparse.OptionParser("usage: %prog [option] markdown_file.md")
parser.add_option("-c", "--config", dest="config_file",
default="./blo.cfg", type="string", help="specify configuration file path to run on")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
cfg_file = options.config_file
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
|
from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
Implement command line option parse program.import configparser
import optparse
from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
parser = optparse.OptionParser("usage: %prog [option] markdown_file.md")
parser.add_option("-c", "--config", dest="config_file",
default="./blo.cfg", type="string", help="specify configuration file path to run on")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
cfg_file = options.config_file
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
|
<commit_before>from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
<commit_msg>Implement command line option parse program.<commit_after>import configparser
import optparse
from .BloArticle import BloArticle
class Blo:
def __init__(self):
pass
if __name__ == '__main__':
parser = optparse.OptionParser("usage: %prog [option] markdown_file.md")
parser.add_option("-c", "--config", dest="config_file",
default="./blo.cfg", type="string", help="specify configuration file path to run on")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("incorrect number of arguments")
cfg_file = options.config_file
# TODO: implement main routine of Blo.
# blo [-c config_file] markdown_file.md
# -- if no -c option then load config file from default path (current directory).
# ---- if no configuration file on current directory blo said error.
# 1. init database (database name from environment variable or configuration file)
# 2. parse markdown file from command line argument.
# -- if command line argument path is directory then it will do recursive in directory.
# 3. generate html and commit to database
pass
|
4a9e08b3ab73ce9b1d33c40f9d95d348b1bd8e6a
|
tests/test_sphere_manifold.py
|
tests/test_sphere_manifold.py
|
import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point (X) on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H) in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
|
import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point X on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
|
Fix typo in unit test class of the sphere manifold
|
Fix typo in unit test class of the sphere manifold
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
|
Python
|
bsd-3-clause
|
nkoep/pymanopt,pymanopt/pymanopt,nkoep/pymanopt,tingelst/pymanopt,pymanopt/pymanopt,nkoep/pymanopt,j-towns/pymanopt
|
import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point (X) on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H) in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
Fix typo in unit test class of the sphere manifold
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
|
import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point X on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
|
<commit_before>import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point (X) on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H) in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
<commit_msg>Fix typo in unit test class of the sphere manifold
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after>
|
import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point X on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
|
import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point (X) on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H) in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
Fix typo in unit test class of the sphere manifold
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point X on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
|
<commit_before>import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point (X) on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H) in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
<commit_msg>Fix typo in unit test class of the sphere manifold
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after>import unittest
import numpy as np
import numpy.linalg as la
import numpy.random as rnd
import numpy.testing as np_testing
from pymanopt.manifolds import Sphere
class TestSphereManifold(unittest.TestCase):
def setUp(self):
self.m = m = 100
self.n = n = 50
self.sphere = Sphere(m, n)
def test_proj(self):
# Construct a random point X on the manifold.
X = rnd.randn(self.m, self.n)
X /= la.norm(X, "fro")
# Construct a vector H in the ambient space.
H = rnd.randn(self.m, self.n)
# Compare the projections.
np_testing.assert_array_almost_equal(H - X * np.trace(X.T.dot(H)),
self.sphere.proj(X, H))
|
114f2e4c33a924dce32eef80f291c7a5d623de4c
|
tools/clang/scripts/update.py
|
tools/clang/scripts/update.py
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, the
# subprocess module dup()s it in the child process - and a dup()ed sys.stdin
# is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=sys.stdin)
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, a
# dup()ed sys.stdin is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=os.fdopen(os.dup(sys.stdin.fileno())))
if __name__ == '__main__':
sys.exit(main())
|
Fix spurious '../llvm/configure: line 541: 0: Bad file descriptor' on clang roll try jobs.
|
clang: Fix spurious '../llvm/configure: line 541: 0: Bad file descriptor' on clang roll try jobs.
The subprocess module used to dup() stdin, but apparently that's no longer true. So dup it manually.
Patch from Jay Soffian <jaysoffian@gmail.com>!
BUG=none
TEST=Put a new clang revision in tools/clang/scripts/update.sh, send try job. Works.
Review URL: http://codereview.chromium.org/9430007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@122839 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
ropik/chromium,yitian134/chromium,adobe/chromium,gavinp/chromium,gavinp/chromium,ropik/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,ropik/chromium,yitian134/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,yitian134/chromium,yitian134/chromium,adobe/chromium,gavinp/chromium,adobe/chromium,yitian134/chromium,ropik/chromium,adobe/chromium,yitian134/chromium,ropik/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,gavinp/chromium
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, the
# subprocess module dup()s it in the child process - and a dup()ed sys.stdin
# is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=sys.stdin)
if __name__ == '__main__':
sys.exit(main())
clang: Fix spurious '../llvm/configure: line 541: 0: Bad file descriptor' on clang roll try jobs.
The subprocess module used to dup() stdin, but apparently that's no longer true. So dup it manually.
Patch from Jay Soffian <jaysoffian@gmail.com>!
BUG=none
TEST=Put a new clang revision in tools/clang/scripts/update.sh, send try job. Works.
Review URL: http://codereview.chromium.org/9430007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@122839 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, a
# dup()ed sys.stdin is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=os.fdopen(os.dup(sys.stdin.fileno())))
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, the
# subprocess module dup()s it in the child process - and a dup()ed sys.stdin
# is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=sys.stdin)
if __name__ == '__main__':
sys.exit(main())
<commit_msg>clang: Fix spurious '../llvm/configure: line 541: 0: Bad file descriptor' on clang roll try jobs.
The subprocess module used to dup() stdin, but apparently that's no longer true. So dup it manually.
Patch from Jay Soffian <jaysoffian@gmail.com>!
BUG=none
TEST=Put a new clang revision in tools/clang/scripts/update.sh, send try job. Works.
Review URL: http://codereview.chromium.org/9430007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@122839 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
  """Run update.sh, forwarding this script's arguments; no-op on Windows.

  Returns update.sh's exit status (0 when skipped on win32/cygwin).
  """
  if sys.platform in ['win32', 'cygwin']:
    return 0

  # This script is called by gclient. gclient opens its hooks subprocesses with
  # (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
  # output processing that breaks printing '\r' characters for single-line
  # updating status messages as printed by curl and wget.
  # Work around this by setting stderr of the update.sh process to stdin (!):
  # gclient doesn't redirect stdin, and while stdin itself is read-only, a
  # dup()ed sys.stdin is writable, try
  #   fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
  # TODO: Fix gclient instead, http://crbug.com/95350
  # NOTE(review): os.fdopen wraps a fresh dup() of stdin's fd so the child's
  # stderr reaches the terminal without passing through gclient's pipe.
  return subprocess.call(
      [os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
      stderr=os.fdopen(os.dup(sys.stdin.fileno())))
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, the
# subprocess module dup()s it in the child process - and a dup()ed sys.stdin
# is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=sys.stdin)
if __name__ == '__main__':
sys.exit(main())
clang: Fix spurious '../llvm/configure: line 541: 0: Bad file descriptor' on clang roll try jobs.
The subprocess module used to dup() stdin, but apparently that's no longer true. So dup it manually.
Patch from Jay Soffian <jaysoffian@gmail.com>!
BUG=none
TEST=Put a new clang revision in tools/clang/scripts/update.sh, send try job. Works.
Review URL: http://codereview.chromium.org/9430007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@122839 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, a
# dup()ed sys.stdin is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=os.fdopen(os.dup(sys.stdin.fileno())))
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, the
# subprocess module dup()s it in the child process - and a dup()ed sys.stdin
# is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=sys.stdin)
if __name__ == '__main__':
sys.exit(main())
<commit_msg>clang: Fix spurious '../llvm/configure: line 541: 0: Bad file descriptor' on clang roll try jobs.
The subprocess module used to dup() stdin, but apparently that's no longer true. So dup it manually.
Patch from Jay Soffian <jaysoffian@gmail.com>!
BUG=none
TEST=Put a new clang revision in tools/clang/scripts/update.sh, send try job. Works.
Review URL: http://codereview.chromium.org/9430007
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@122839 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Windows can't run .sh files, so this is a small python wrapper around
update.sh.
"""
import os
import subprocess
import sys
def main():
if sys.platform in ['win32', 'cygwin']:
return 0
# This script is called by gclient. gclient opens its hooks subprocesses with
# (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does custom
# output processing that breaks printing '\r' characters for single-line
# updating status messages as printed by curl and wget.
# Work around this by setting stderr of the update.sh process to stdin (!):
# gclient doesn't redirect stdin, and while stdin itself is read-only, a
# dup()ed sys.stdin is writable, try
# fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
# TODO: Fix gclient instead, http://crbug.com/95350
return subprocess.call(
[os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
stderr=os.fdopen(os.dup(sys.stdin.fileno())))
if __name__ == '__main__':
sys.exit(main())
|
fae25cfe1a19de44d950e1a04fb6caa4f452b818
|
test/test_pre_commit.py
|
test/test_pre_commit.py
|
import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_without_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = ''
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'])
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_with_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = 'yep'
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'], 'yep')
|
import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
    """Tests for pre_commit.main()'s dispatch of enabled checks.

    setUp patches pre_commit's collaborators so main() sees exactly one
    enabled fake check operating on one staged file.
    """

    def setUp(self):
        self.get_files_patch = patch('captainhook.pre_commit.get_files')
        get_files = self.get_files_patch.start()
        get_files.return_value = ['file_one']

        self.hook_config_patch = patch('captainhook.pre_commit.HookConfig')
        self.HookConfig = self.hook_config_patch.start()
        self.HookConfig().is_enabled.return_value = True
        self.HookConfig().arguments.return_value = ''

        self.testmod = Mock()
        self.testmod.run.return_value = None

        self.checks_patch = patch('captainhook.pre_commit.checks')
        checks = self.checks_patch.start()
        checks.return_value = [("testmod", self.testmod)]

    def tearDown(self):
        # Stop patches in reverse order of starting.
        self.checks_patch.stop()
        self.hook_config_patch.stop()
        self.get_files_patch.stop()

    def test_calling_run_without_args(self):
        """A check configured with no arguments is run with the files only."""
        result = pre_commit.main()

        # assertEqual, not the deprecated assertEquals alias (removed in 3.12).
        self.assertEqual(result, 0)
        self.testmod.run.assert_called_with(['file_one'])

    def test_calling_run_with_args(self):
        """A check with configured arguments receives them after the files."""
        self.HookConfig().arguments.return_value = 'yep'

        result = pre_commit.main()

        self.assertEqual(result, 0)
        self.testmod.run.assert_called_with(['file_one'], 'yep')
|
Replace pre-commit test patch decorators with setUp/tearDown patching.
|
Replace pre-commit test patch decorators with setUp/tearDown patching.
|
Python
|
bsd-3-clause
|
Friz-zy/captainhook,alexcouper/captainhook,pczerkas/captainhook
|
import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_without_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = ''
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'])
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_with_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = 'yep'
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'], 'yep')
Replace pre-commit test patch decorators with setUp/tearDown patching.
|
import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
def setUp(self):
self.get_files_patch = patch('captainhook.pre_commit.get_files')
get_files = self.get_files_patch.start()
get_files.return_value = ['file_one']
self.hook_config_patch = patch('captainhook.pre_commit.HookConfig')
self.HookConfig = self.hook_config_patch.start()
self.HookConfig().is_enabled.return_value = True
self.HookConfig().arguments.return_value = ''
self.testmod = Mock()
self.testmod.run.return_value = None
self.checks_patch = patch('captainhook.pre_commit.checks')
checks = self.checks_patch.start()
checks.return_value = [("testmod", self.testmod)]
def tearDown(self):
self.checks_patch.stop()
self.hook_config_patch.stop()
self.get_files_patch.stop()
def test_calling_run_without_args(self):
result = pre_commit.main()
self.assertEquals(result, 0)
self.testmod.run.assert_called_with(['file_one'])
def test_calling_run_with_args(self):
self.HookConfig().arguments.return_value = 'yep'
result = pre_commit.main()
self.assertEquals(result, 0)
self.testmod.run.assert_called_with(['file_one'], 'yep')
|
<commit_before>import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_without_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = ''
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'])
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_with_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = 'yep'
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'], 'yep')
<commit_msg>Replace pre-commit test patch decorators with setUp/tearDown patching.<commit_after>
|
import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
def setUp(self):
self.get_files_patch = patch('captainhook.pre_commit.get_files')
get_files = self.get_files_patch.start()
get_files.return_value = ['file_one']
self.hook_config_patch = patch('captainhook.pre_commit.HookConfig')
self.HookConfig = self.hook_config_patch.start()
self.HookConfig().is_enabled.return_value = True
self.HookConfig().arguments.return_value = ''
self.testmod = Mock()
self.testmod.run.return_value = None
self.checks_patch = patch('captainhook.pre_commit.checks')
checks = self.checks_patch.start()
checks.return_value = [("testmod", self.testmod)]
def tearDown(self):
self.checks_patch.stop()
self.hook_config_patch.stop()
self.get_files_patch.stop()
def test_calling_run_without_args(self):
result = pre_commit.main()
self.assertEquals(result, 0)
self.testmod.run.assert_called_with(['file_one'])
def test_calling_run_with_args(self):
self.HookConfig().arguments.return_value = 'yep'
result = pre_commit.main()
self.assertEquals(result, 0)
self.testmod.run.assert_called_with(['file_one'], 'yep')
|
import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_without_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = ''
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'])
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_with_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = 'yep'
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'], 'yep')
Replace pre-commit test patch decorators with setUp/tearDown patching.import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
def setUp(self):
self.get_files_patch = patch('captainhook.pre_commit.get_files')
get_files = self.get_files_patch.start()
get_files.return_value = ['file_one']
self.hook_config_patch = patch('captainhook.pre_commit.HookConfig')
self.HookConfig = self.hook_config_patch.start()
self.HookConfig().is_enabled.return_value = True
self.HookConfig().arguments.return_value = ''
self.testmod = Mock()
self.testmod.run.return_value = None
self.checks_patch = patch('captainhook.pre_commit.checks')
checks = self.checks_patch.start()
checks.return_value = [("testmod", self.testmod)]
def tearDown(self):
self.checks_patch.stop()
self.hook_config_patch.stop()
self.get_files_patch.stop()
def test_calling_run_without_args(self):
result = pre_commit.main()
self.assertEquals(result, 0)
self.testmod.run.assert_called_with(['file_one'])
def test_calling_run_with_args(self):
self.HookConfig().arguments.return_value = 'yep'
result = pre_commit.main()
self.assertEquals(result, 0)
self.testmod.run.assert_called_with(['file_one'], 'yep')
|
<commit_before>import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_without_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = ''
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'])
@patch('captainhook.pre_commit.get_files')
@patch('captainhook.pre_commit.HookConfig')
@patch('captainhook.pre_commit.checks')
def test_calling_run_with_args(self, checks, HookConfig, get_files):
get_files.return_value = ['file_one']
HookConfig().is_enabled.return_value = True
HookConfig().arguments.return_value = 'yep'
testmod = Mock()
testmod.run.return_value = None
checks.return_value = [("testmod", testmod)]
result = pre_commit.main()
self.assertEquals(result, 0)
testmod.run.assert_called_with(['file_one'], 'yep')
<commit_msg>Replace pre-commit test patch decorators with setUp/tearDown patching.<commit_after>import unittest
from mock import Mock, patch
from captainhook import pre_commit
class TestMain(unittest.TestCase):
def setUp(self):
self.get_files_patch = patch('captainhook.pre_commit.get_files')
get_files = self.get_files_patch.start()
get_files.return_value = ['file_one']
self.hook_config_patch = patch('captainhook.pre_commit.HookConfig')
self.HookConfig = self.hook_config_patch.start()
self.HookConfig().is_enabled.return_value = True
self.HookConfig().arguments.return_value = ''
self.testmod = Mock()
self.testmod.run.return_value = None
self.checks_patch = patch('captainhook.pre_commit.checks')
checks = self.checks_patch.start()
checks.return_value = [("testmod", self.testmod)]
def tearDown(self):
self.checks_patch.stop()
self.hook_config_patch.stop()
self.get_files_patch.stop()
def test_calling_run_without_args(self):
result = pre_commit.main()
self.assertEquals(result, 0)
self.testmod.run.assert_called_with(['file_one'])
def test_calling_run_with_args(self):
self.HookConfig().arguments.return_value = 'yep'
result = pre_commit.main()
self.assertEquals(result, 0)
self.testmod.run.assert_called_with(['file_one'], 'yep')
|
18ff5cc690fada5c437a1be8e99df57cd8edfaea
|
tests/extractor_test.py
|
tests/extractor_test.py
|
import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_shape = ((13, 1000))
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_shape, got.shape) # TODO figure out num hops
|
import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_columns = 13
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_columns, got.shape[0])
|
Fix spacing. Update mfcc test to check columns
|
Fix spacing. Update mfcc test to check columns
|
Python
|
mit
|
tingled/synthetic-cartography,tingled/synthetic-cartography
|
import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_shape = ((13, 1000))
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_shape, got.shape) # TODO figure out num hops
Fix spacing. Update mfcc test to check columns
|
import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_columns = 13
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_columns, got.shape[0])
|
<commit_before>import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_shape = ((13, 1000))
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_shape, got.shape) # TODO figure out num hops
<commit_msg>Fix spacing. Update mfcc test to check columns<commit_after>
|
import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
    """Return the phase ramp 2*pi*freq*n/sr for n = 0 .. dur*sr - 1."""
    angular_rate = np.pi * 2 * freq
    return angular_rate * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
    """Unit tests for LibrosaFeatureExtractor's feature computations."""

    @classmethod
    def setUpClass(cls):
        # Shared synthetic tone: 2 s at 440 Hz, sampled at 22.05 kHz.
        cls.test_dur = 2
        cls.test_freq = 440
        cls.test_sr = 22050
        cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)

    def test_mfcc(self):
        """_mfcc should yield one row per requested coefficient."""
        fx = LibrosaFeatureExtractor(None)
        requested = 13
        options = {
            'num_mfccs': requested,
            'delta_mfccs': False,
            'delta2_mfccs': False
        }
        features = fx._mfcc(self.test_signal, self.test_sr, **options)
        self.assertEqual(requested, features.shape[0])
|
import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_shape = ((13, 1000))
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_shape, got.shape) # TODO figure out num hops
Fix spacing. Update mfcc test to check columnsimport numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_columns = 13
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_columns, got.shape[0])
|
<commit_before>import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_shape = ((13, 1000))
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_shape, got.shape) # TODO figure out num hops
<commit_msg>Fix spacing. Update mfcc test to check columns<commit_after>import numpy as np
from unittest import TestCase
from cartography.extractor import LibrosaFeatureExtractor
def gen_signal(dur, sr, freq):
return np.pi * 2 * freq * np.arange(dur * sr) / float(sr)
class TestLibrosaFeatureExtractor(TestCase):
@classmethod
def setUpClass(cls):
cls.test_dur = 2
cls.test_freq = 440
cls.test_sr = 22050
cls.test_signal = gen_signal(cls.test_dur, cls.test_sr, cls.test_freq)
def test_mfcc(self):
extractor = LibrosaFeatureExtractor(None)
num_mfccs = 13
mfccs_kwargs = {
'num_mfccs': num_mfccs,
'delta_mfccs': False,
'delta2_mfccs': False
}
expected_columns = 13
got = extractor._mfcc(self.test_signal, self.test_sr, **mfccs_kwargs)
self.assertEqual(expected_columns, got.shape[0])
|
8989258dab574cff0bc8001f1d59232983d15f68
|
grammpy/Grammars/PrettyApiGrammar.py
|
grammpy/Grammars/PrettyApiGrammar.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
def __copy__(self):
return PrettyApiGrammar(terminals=(t.s for t in self.terms()),
nonterminals=self.nonterms(),
rules=self.rules(),
start_symbol=self.start_get())
|
Add __copy__ method to grammar
|
Add __copy__ method to grammar
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
Add __copy__ method to grammar
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
def __copy__(self):
return PrettyApiGrammar(terminals=(t.s for t in self.terms()),
nonterminals=self.nonterms(),
rules=self.rules(),
start_symbol=self.start_get())
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
<commit_msg>Add __copy__ method to grammar<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
def __copy__(self):
return PrettyApiGrammar(terminals=(t.s for t in self.terms()),
nonterminals=self.nonterms(),
rules=self.rules(),
start_symbol=self.start_get())
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
Add __copy__ method to grammar#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
def __copy__(self):
return PrettyApiGrammar(terminals=(t.s for t in self.terms()),
nonterminals=self.nonterms(),
rules=self.rules(),
start_symbol=self.start_get())
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
<commit_msg>Add __copy__ method to grammar<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 01.08.2017 07:33
:Licence GNUv3
Part of grammpy
"""
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar
class PrettyApiGrammar(Grammar):
def __init__(self,
terminals=None,
nonterminals=None,
rules=None,
start_symbol=None):
if isinstance(terminals, str):
temp = []
for ch in terminals:
temp.append(ch)
terminals = temp
super().__init__(terminals=terminals,
nonterminals=nonterminals,
rules=rules,
start_symbol=start_symbol)
def __copy__(self):
return PrettyApiGrammar(terminals=(t.s for t in self.terms()),
nonterminals=self.nonterms(),
rules=self.rules(),
start_symbol=self.start_get())
|
e4f7deee8c4154781c2e945bfc14cf2028586dc1
|
hellopython/print_method/__init__.py
|
hellopython/print_method/__init__.py
|
import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
|
import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
title = 'Print method'
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
|
Add a title to the print_method problem
|
Add a title to the print_method problem
|
Python
|
mit
|
pyschool/hipyschool
|
import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
Add a title to the print_method problem
|
import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
title = 'Print method'
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
|
<commit_before>import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
<commit_msg>Add a title to the print_method problem<commit_after>
|
import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
title = 'Print method'
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
|
import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
Add a title to the print_method problemimport codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
title = 'Print method'
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
|
<commit_before>import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
<commit_msg>Add a title to the print_method problem<commit_after>import codecs
import io
import sys
from workshopper.problems import BaseProblem
class Problem(BaseProblem):
title = 'Print method'
def test(self, file):
old_stdout = sys.stdout
sys.stdout = io.StringIO()
eval(codecs.open(file).read())
message = sys.stdout.getvalue()
sys.stdout = old_stdout
assert message == 'Hello World\n'
|
debe3a250a04986583589b1192cb6111b8b6c228
|
pydelhiconf/uix/screens/screenabout.py
|
pydelhiconf/uix/screens/screenabout.py
|
from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
imbt.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)
|
from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
FloatLayout
size_hint_y: None
height: dp(45)
ActiveButton
id: but
text: "Visit our website"
size_hint: None, None
width: dp(200)
center_x: comm_desc.center_x
top: comm_desc.y - dp(10)
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
self.ids.but.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)
|
Add button that links to website
|
Add button that links to website
|
Python
|
agpl-3.0
|
pydelhi/pydelhi_mobile,shivan1b/pydelhi_mobile,samukasmk/pythonbrasil_mobile,akshayaurora/PyDelhiMobile
|
from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
imbt.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)Add button that links to website
|
from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
FloatLayout
size_hint_y: None
height: dp(45)
ActiveButton
id: but
text: "Visit our website"
size_hint: None, None
width: dp(200)
center_x: comm_desc.center_x
top: comm_desc.y - dp(10)
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
self.ids.but.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)
|
<commit_before>from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
imbt.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)<commit_msg>Add button that links to website<commit_after>
|
from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
FloatLayout
size_hint_y: None
height: dp(45)
ActiveButton
id: but
text: "Visit our website"
size_hint: None, None
width: dp(200)
center_x: comm_desc.center_x
top: comm_desc.y - dp(10)
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
self.ids.but.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)
|
from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
imbt.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)Add button that links to websitefrom kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
FloatLayout
size_hint_y: None
height: dp(45)
ActiveButton
id: but
text: "Visit our website"
size_hint: None, None
width: dp(200)
center_x: comm_desc.center_x
top: comm_desc.y - dp(10)
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
self.ids.but.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)
|
<commit_before>from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
imbt.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)<commit_msg>Add button that links to website<commit_after>from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.factory import Factory
from functools import partial
import webbrowser
class ScreenAbout(Screen):
Builder.load_string('''
<ScreenAbout>
spacing: dp(9)
name: 'ScreenAbout'
ScrollView
id: scroll
ScrollGrid
AsyncImage
id: imgbt
allow_stretch: True
size_hint_y: None
height: dp(200)
BackLabel
id: comm_desc
FloatLayout
size_hint_y: None
height: dp(45)
ActiveButton
id: but
text: "Visit our website"
size_hint: None, None
width: dp(200)
center_x: comm_desc.center_x
top: comm_desc.y - dp(10)
''')
def on_pre_enter(self):
self.ids.scroll.opacity = 0
def on_enter(self, onsuccess=False):
from network import get_data
about = get_data('about', onsuccess=onsuccess)
if not about:
return
about = about.get('0.0.1')[0]
imbt = self.ids.imgbt
imbt.source = about['logo']
self.ids.but.on_released = partial(webbrowser.open, about['website'])
self.ids.comm_desc.text = about['about']
Factory.Animation(opacity=1, d=.5).start(self.ids.scroll)
|
46f10ffcf60166fe02e33a6cd686f272ae63674e
|
saleor/product/forms.py
|
saleor/product/forms.py
|
from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
variant = forms.ChoiceField()
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
variants = self.product.variants.all()
if self.product.has_variants():
variants = variants.exclude(pk=self.product.base_variant.pk)
variant_choices = [(v.pk, v) for v in variants]
self.fields['variant'].choices = variant_choices
def get_variant(self, cleaned_data):
pk = cleaned_data['variant']
return self.product.variants.get(pk=pk)
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
|
from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
base_variant = forms.CharField(widget=forms.HiddenInput())
variant = forms.ChoiceField(required=False)
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
self.fields['base_variant'].initial = self.product.base_variant.pk
variants = self.product.variants.all().exclude(
pk=self.product.base_variant.pk)
self.fields['variant'].choices = [(v.pk, v) for v in variants]
if not self.product.has_variants():
self.fields['variant'].widget = forms.HiddenInput()
def get_variant(self, cleaned_data):
pk = cleaned_data.get('variant') or cleaned_data.get('base_variant')
variant = self.product.variants.get(pk=pk)
return variant
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
|
Hide variants select input when product has no variants
|
Hide variants select input when product has no variants
|
Python
|
bsd-3-clause
|
josesanch/saleor,HyperManTT/ECommerceSaleor,paweltin/saleor,josesanch/saleor,spartonia/saleor,car3oon/saleor,laosunhust/saleor,mociepka/saleor,arth-co/saleor,UITools/saleor,Drekscott/Motlaesaleor,KenMutemi/saleor,rodrigozn/CW-Shop,tfroehlich82/saleor,HyperManTT/ECommerceSaleor,rchav/vinerack,arth-co/saleor,rchav/vinerack,rchav/vinerack,UITools/saleor,itbabu/saleor,paweltin/saleor,arth-co/saleor,KenMutemi/saleor,taedori81/saleor,spartonia/saleor,mociepka/saleor,arth-co/saleor,laosunhust/saleor,itbabu/saleor,tfroehlich82/saleor,spartonia/saleor,paweltin/saleor,spartonia/saleor,dashmug/saleor,UITools/saleor,taedori81/saleor,laosunhust/saleor,mociepka/saleor,tfroehlich82/saleor,Drekscott/Motlaesaleor,jreigel/saleor,Drekscott/Motlaesaleor,UITools/saleor,KenMutemi/saleor,jreigel/saleor,maferelo/saleor,taedori81/saleor,maferelo/saleor,laosunhust/saleor,avorio/saleor,avorio/saleor,maferelo/saleor,rodrigozn/CW-Shop,car3oon/saleor,taedori81/saleor,josesanch/saleor,dashmug/saleor,car3oon/saleor,Drekscott/Motlaesaleor,UITools/saleor,dashmug/saleor,avorio/saleor,jreigel/saleor,itbabu/saleor,rodrigozn/CW-Shop,HyperManTT/ECommerceSaleor,avorio/saleor,paweltin/saleor
|
from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
variant = forms.ChoiceField()
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
variants = self.product.variants.all()
if self.product.has_variants():
variants = variants.exclude(pk=self.product.base_variant.pk)
variant_choices = [(v.pk, v) for v in variants]
self.fields['variant'].choices = variant_choices
def get_variant(self, cleaned_data):
pk = cleaned_data['variant']
return self.product.variants.get(pk=pk)
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
Hide variants select input when product has no variants
|
from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
base_variant = forms.CharField(widget=forms.HiddenInput())
variant = forms.ChoiceField(required=False)
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
self.fields['base_variant'].initial = self.product.base_variant.pk
variants = self.product.variants.all().exclude(
pk=self.product.base_variant.pk)
self.fields['variant'].choices = [(v.pk, v) for v in variants]
if not self.product.has_variants():
self.fields['variant'].widget = forms.HiddenInput()
def get_variant(self, cleaned_data):
pk = cleaned_data.get('variant') or cleaned_data.get('base_variant')
variant = self.product.variants.get(pk=pk)
return variant
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
|
<commit_before>from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
variant = forms.ChoiceField()
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
variants = self.product.variants.all()
if self.product.has_variants():
variants = variants.exclude(pk=self.product.base_variant.pk)
variant_choices = [(v.pk, v) for v in variants]
self.fields['variant'].choices = variant_choices
def get_variant(self, cleaned_data):
pk = cleaned_data['variant']
return self.product.variants.get(pk=pk)
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
<commit_msg>Hide variants select input when product has no variants<commit_after>
|
from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
base_variant = forms.CharField(widget=forms.HiddenInput())
variant = forms.ChoiceField(required=False)
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
self.fields['base_variant'].initial = self.product.base_variant.pk
variants = self.product.variants.all().exclude(
pk=self.product.base_variant.pk)
self.fields['variant'].choices = [(v.pk, v) for v in variants]
if not self.product.has_variants():
self.fields['variant'].widget = forms.HiddenInput()
def get_variant(self, cleaned_data):
pk = cleaned_data.get('variant') or cleaned_data.get('base_variant')
variant = self.product.variants.get(pk=pk)
return variant
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
|
from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
variant = forms.ChoiceField()
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
variants = self.product.variants.all()
if self.product.has_variants():
variants = variants.exclude(pk=self.product.base_variant.pk)
variant_choices = [(v.pk, v) for v in variants]
self.fields['variant'].choices = variant_choices
def get_variant(self, cleaned_data):
pk = cleaned_data['variant']
return self.product.variants.get(pk=pk)
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
Hide variants select input when product has no variantsfrom django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
base_variant = forms.CharField(widget=forms.HiddenInput())
variant = forms.ChoiceField(required=False)
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
self.fields['base_variant'].initial = self.product.base_variant.pk
variants = self.product.variants.all().exclude(
pk=self.product.base_variant.pk)
self.fields['variant'].choices = [(v.pk, v) for v in variants]
if not self.product.has_variants():
self.fields['variant'].widget = forms.HiddenInput()
def get_variant(self, cleaned_data):
pk = cleaned_data.get('variant') or cleaned_data.get('base_variant')
variant = self.product.variants.get(pk=pk)
return variant
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
|
<commit_before>from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
variant = forms.ChoiceField()
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
variants = self.product.variants.all()
if self.product.has_variants():
variants = variants.exclude(pk=self.product.base_variant.pk)
variant_choices = [(v.pk, v) for v in variants]
self.fields['variant'].choices = variant_choices
def get_variant(self, cleaned_data):
pk = cleaned_data['variant']
return self.product.variants.get(pk=pk)
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
<commit_msg>Hide variants select input when product has no variants<commit_after>from django import forms
from django.utils.translation import pgettext_lazy
from ..cart.forms import AddToCartForm
from ..product.models import GenericProduct
class GenericProductForm(AddToCartForm):
base_variant = forms.CharField(widget=forms.HiddenInput())
variant = forms.ChoiceField(required=False)
def __init__(self, *args, **kwargs):
super(GenericProductForm, self).__init__(*args, **kwargs)
self.fields['base_variant'].initial = self.product.base_variant.pk
variants = self.product.variants.all().exclude(
pk=self.product.base_variant.pk)
self.fields['variant'].choices = [(v.pk, v) for v in variants]
if not self.product.has_variants():
self.fields['variant'].widget = forms.HiddenInput()
def get_variant(self, cleaned_data):
pk = cleaned_data.get('variant') or cleaned_data.get('base_variant')
variant = self.product.variants.get(pk=pk)
return variant
class ProductVariantInline(forms.models.BaseInlineFormSet):
error_no_items = pgettext_lazy('Product admin error', 'You have to create at least one variant')
def clean(self):
count = 0
for form in self.forms:
if form.cleaned_data:
count += 1
if count < 1:
raise forms.ValidationError(self.error_no_items)
class ImageInline(ProductVariantInline):
error_no_items = pgettext_lazy('Product admin error', 'You have to add at least one image')
def get_form_class_for_product(product):
if isinstance(product, GenericProduct):
return GenericProductForm
raise NotImplementedError
|
3301f112eb1a8e8706fc17373cc71c2f02691382
|
main.py
|
main.py
|
#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
|
#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
# FIX ME - introduce sys.argv[1] to choose player AND game?
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
|
Prepare for config and command line arguments
|
Prepare for config and command line arguments
|
Python
|
mit
|
tchapi/pianette,tchapi/pianette,tchapi/pianette,tchapi/pianette
|
#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
Prepare for config and command line arguments
|
#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
# FIX ME - introduce sys.argv[1] to choose player AND game?
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
|
<commit_before>#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
<commit_msg>Prepare for config and command line arguments<commit_after>
|
#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
# FIX ME - introduce sys.argv[1] to choose player AND game?
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
|
#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
Prepare for config and command line arguments#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
# FIX ME - introduce sys.argv[1] to choose player AND game?
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
|
<commit_before>#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
<commit_msg>Prepare for config and command line arguments<commit_after>#!/usr/bin/env python3
# Pianette
# A command-line emulator of a PS2 Game Pad Controller
# that asynchronously listens to GPIO EDGE_RISING
# inputs from sensors and sends Serial commands to
# an ATMEGA328P acting as a fake SPI Slave for the Console.
# Written in Python 3.
import pianette.config
import sys
from pianette.GPIOController import GPIOController
from pianette.Pianette import Pianette
from pianette.utils import Debug
Debug.println("INFO", " ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " | PIANETTE | ")
Debug.println("INFO", " ################################## ")
Debug.println("INFO", " ")
# FIX ME - introduce sys.argv[1] to choose player AND game?
configobj = pianette.config.get_configobj('street-fighter-alpha-3', 'player1')
# Instanciate the global Pianette
# Its responsibility is to translate Piano actions to Console actions
pianette = Pianette(configobj=configobj)
# Instanciate the global GPIO Controller
# Its responsibility is to feed the Pianette based on GPIO inputs
gpio_controller = GPIOController(configobj=configobj, pianette=pianette)
# Make the Pianette object listen to GPIO inputs
pianette.enable_source("gpio")
# Run the main loop of interactive Pianette
Debug.println("NOTICE", "Entering main loop")
pianette.cmd.cmdloop()
|
5beea76076aa0806f8ee2db5f2169846e7497ef1
|
euler_python/problem55.py
|
euler_python/problem55.py
|
"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def to_digits(num):
# to_digits(1234) --> [1, 2, 3, 4]
return list(map(int, str(num)))
def to_num(digits):
# to_num([1, 2, 3, 4]) --> 1234
return int(''.join(map(str, digits)))
@memoize
def is_palindromic(num):
return to_digits(num) == list(reversed(to_digits(num)))
def is_lychrel(num):
rev = lambda x: to_num(reversed(to_digits(x)))
start = num + rev(num)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(n) for n in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
|
"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def rev(n):
"""Return the reverse of n's digits"""
return int(''.join(reversed(str(n))))
@memoize
def is_palindromic(n):
return n == rev(n)
def is_lychrel(n):
start = n + rev(n)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(y) for y in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
|
Simplify python solution to 55
|
Simplify python solution to 55
|
Python
|
mit
|
mjwestcott/projecteuler,mjwestcott/projecteuler,mjwestcott/projecteuler
|
"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def to_digits(num):
# to_digits(1234) --> [1, 2, 3, 4]
return list(map(int, str(num)))
def to_num(digits):
# to_num([1, 2, 3, 4]) --> 1234
return int(''.join(map(str, digits)))
@memoize
def is_palindromic(num):
return to_digits(num) == list(reversed(to_digits(num)))
def is_lychrel(num):
rev = lambda x: to_num(reversed(to_digits(x)))
start = num + rev(num)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(n) for n in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
Simplify python solution to 55
|
"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def rev(n):
"""Return the reverse of n's digits"""
return int(''.join(reversed(str(n))))
@memoize
def is_palindromic(n):
return n == rev(n)
def is_lychrel(n):
start = n + rev(n)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(y) for y in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
|
<commit_before>"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def to_digits(num):
# to_digits(1234) --> [1, 2, 3, 4]
return list(map(int, str(num)))
def to_num(digits):
# to_num([1, 2, 3, 4]) --> 1234
return int(''.join(map(str, digits)))
@memoize
def is_palindromic(num):
return to_digits(num) == list(reversed(to_digits(num)))
def is_lychrel(num):
rev = lambda x: to_num(reversed(to_digits(x)))
start = num + rev(num)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(n) for n in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
<commit_msg>Simplify python solution to 55<commit_after>
|
"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def rev(n):
"""Return the reverse of n's digits"""
return int(''.join(reversed(str(n))))
@memoize
def is_palindromic(n):
return n == rev(n)
def is_lychrel(n):
start = n + rev(n)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(y) for y in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
|
"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def to_digits(num):
# to_digits(1234) --> [1, 2, 3, 4]
return list(map(int, str(num)))
def to_num(digits):
# to_num([1, 2, 3, 4]) --> 1234
return int(''.join(map(str, digits)))
@memoize
def is_palindromic(num):
return to_digits(num) == list(reversed(to_digits(num)))
def is_lychrel(num):
rev = lambda x: to_num(reversed(to_digits(x)))
start = num + rev(num)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(n) for n in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
Simplify python solution to 55"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def rev(n):
"""Return the reverse of n's digits"""
return int(''.join(reversed(str(n))))
@memoize
def is_palindromic(n):
return n == rev(n)
def is_lychrel(n):
start = n + rev(n)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(y) for y in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
|
<commit_before>"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def to_digits(num):
# to_digits(1234) --> [1, 2, 3, 4]
return list(map(int, str(num)))
def to_num(digits):
# to_num([1, 2, 3, 4]) --> 1234
return int(''.join(map(str, digits)))
@memoize
def is_palindromic(num):
return to_digits(num) == list(reversed(to_digits(num)))
def is_lychrel(num):
rev = lambda x: to_num(reversed(to_digits(x)))
start = num + rev(num)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(n) for n in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
<commit_msg>Simplify python solution to 55<commit_after>"""
problem55.py
If we take 47, reverse and add, 47 + 74 = 121, which is palindromic. A number
that never forms a palindrome through the reverse and add process is called a
Lychrel number. How many Lychrel numbers are there below ten-thousand? (Only
consider fifty iterations)
"""
from toolset import iterate, quantify, take, memoize
def rev(n):
"""Return the reverse of n's digits"""
return int(''.join(reversed(str(n))))
@memoize
def is_palindromic(n):
return n == rev(n)
def is_lychrel(n):
start = n + rev(n)
iterations = iterate(lambda x: x + rev(x), start)
return not any(is_palindromic(y) for y in take(50, iterations))
def problem55():
return quantify(range(1, 10000), pred=is_lychrel)
|
91dee60bb768a8ab80530cab79649b60afdf7daf
|
mbed.py
|
mbed.py
|
from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
set_project_dir(find_mbed_dir())
run(sys.argv[1:])
|
from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
base = find_mbed_dir()
if base:
set_project_dir(base)
sys.path.append(base)
run(sys.argv[1:])
|
Fix Python module search path
|
Fix Python module search path
|
Python
|
apache-2.0
|
bogdanm/mbed-clt
|
from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
set_project_dir(find_mbed_dir())
run(sys.argv[1:])
Fix Python module search path
|
from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
base = find_mbed_dir()
if base:
set_project_dir(base)
sys.path.append(base)
run(sys.argv[1:])
|
<commit_before>from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
set_project_dir(find_mbed_dir())
run(sys.argv[1:])
<commit_msg>Fix Python module search path<commit_after>
|
from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
base = find_mbed_dir()
if base:
set_project_dir(base)
sys.path.append(base)
run(sys.argv[1:])
|
from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
set_project_dir(find_mbed_dir())
run(sys.argv[1:])
Fix Python module search pathfrom utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
base = find_mbed_dir()
if base:
set_project_dir(base)
sys.path.append(base)
run(sys.argv[1:])
|
<commit_before>from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
set_project_dir(find_mbed_dir())
run(sys.argv[1:])
<commit_msg>Fix Python module search path<commit_after>from utils.helpers import error, find_mbed_dir, is_mbed_dir
import sys, os
from utils import set_project_dir
from commands.set import CmdSet
from commands.get import CmdGet
from commands.clone import CmdClone
from commands.compile import CmdCompile
from commands.list import CmdList
################################################################################
# Local functions
def help_and_exit(cmds):
error("Syntax: mbed <command> [arguments]")
error("Valid commands:")
for c in cmds:
error(" " + c.get_help() + "")
os._exit(1)
def run(args):
cmds = [CmdSet(), CmdGet()]
if is_mbed_dir():
cmds = cmds + [CmdCompile(), CmdList()]
else:
cmds = cmds = [CmdClone()]
if len(args) == 0:
error("No command given.")
help_and_exit(cmds)
cmd_map = dict([(c.get_name(), c) for c in cmds])
cmd = args[0].lower()
if not cmd in cmd_map:
error("Invalid command '%s'." % args[0])
help_and_exit(cmds)
res = cmd_map[cmd](args[1:])
if res == None:
error("Invalid command syntax")
error(cmd_map[cmd].get_help())
elif res == False:
os._exit(1)
################################################################################
# Entry point
if __name__ == "__main__":
base = find_mbed_dir()
if base:
set_project_dir(base)
sys.path.append(base)
run(sys.argv[1:])
|
0f0da20d8bf270d9c9f329b4ed0bbba22de2d109
|
moksha/api/streams/datastream.py
|
moksha/api/streams/datastream.py
|
import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=False)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
|
import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self, now=True):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=now)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
|
Make our PollingDataStreamers start up right away
|
Make our PollingDataStreamers start up right away
|
Python
|
apache-2.0
|
pombredanne/moksha,ralphbean/moksha,mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha,lmacken/moksha,ralphbean/moksha,lmacken/moksha,lmacken/moksha,mokshaproject/moksha,ralphbean/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha
|
import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=False)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
Make our PollingDataStreamers start up right away
|
import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self, now=True):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=now)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
|
<commit_before>import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=False)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
<commit_msg>Make our PollingDataStreamers start up right away<commit_after>
|
import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self, now=True):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=now)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
|
import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=False)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
Make our PollingDataStreamers start up right awayimport logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self, now=True):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=now)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
|
<commit_before>import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=False)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
<commit_msg>Make our PollingDataStreamers start up right away<commit_after>import logging
from datetime import timedelta
from twisted.internet.task import LoopingCall
from moksha.hub.hub import MokshaHub
log = logging.getLogger('moksha.hub')
class DataStream(object):
""" The parent DataStream class. """
def __init__(self):
self.hub = MokshaHub()
def send_message(self, topic, message):
try:
self.hub.send_message(topic, message)
except Exception, e:
log.error('Cannot send message: %s' % e)
def stop(self):
self.hub.close()
class PollingDataStream(DataStream):
""" A self-polling data stream.
This class represents a data stream that wakes up at a given frequency,
and calls the :meth:`poll` method.
"""
frequency = None # Either a timedelta object, or the number of seconds
def __init__(self, now=True):
super(PollingDataStream, self).__init__()
self.timer = LoopingCall(self.poll)
if isinstance(self.frequency, timedelta):
seconds = self.frequency.seconds + \
(self.frequency.days * 24 * 60 * 60) + \
(self.frequency.microseconds / 1000000.0)
else:
seconds = self.frequency
log.debug("Setting a %s second timers" % seconds)
self.timer.start(seconds, now=now)
def poll(self):
raise NotImplementedError
def stop(self):
super(PollingDataStream, self).stop()
self.timer.stop()
|
8d7f4e549a2f83e93de9d440a7aa979b73cfba38
|
examples/my_test_suite.py
|
examples/my_test_suite.py
|
''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
Make it clear that a few example tests fail on purpose
|
Make it clear that a few example tests fail on purpose
|
Python
|
mit
|
mdmintz/seleniumspot,possoumous/Watchers,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,possoumous/Watchers,mdmintz/SeleniumBase,mdmintz/SeleniumBase,ktp420/SeleniumBase,ktp420/SeleniumBase,ktp420/SeleniumBase,mdmintz/SeleniumBase,possoumous/Watchers,mdmintz/seleniumspot,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,ktp420/SeleniumBase,seleniumbase/SeleniumBase,possoumous/Watchers
|
''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
Make it clear that a few example tests fail on purpose
|
''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
<commit_before>''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
<commit_msg>Make it clear that a few example tests fail on purpose<commit_after>
|
''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
Make it clear that a few example tests fail on purpose''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
<commit_before>''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
<commit_msg>Make it clear that a few example tests fail on purpose<commit_after>''' NOTE: This test suite contains 2 passing tests and 2 failing tests. '''
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test should FAIL
print "\n(This test fails on purpose)"
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
ee61a63acce97ef878e761678a0069f8fa459ea8
|
xbob/learn/linear/__init__.py
|
xbob/learn/linear/__init__.py
|
from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [k for k in dir() if not k.startswith('_')]
del k
|
from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
Fix python3 compatibility issues with doc fix
|
Fix python3 compatibility issues with doc fix
|
Python
|
bsd-3-clause
|
tiagofrepereira2012/bob.learn.linear,tiagofrepereira2012/bob.learn.linear,tiagofrepereira2012/bob.learn.linear
|
from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [k for k in dir() if not k.startswith('_')]
del k
Fix python3 compatibility issues with doc fix
|
from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
<commit_before>from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [k for k in dir() if not k.startswith('_')]
del k
<commit_msg>Fix python3 compatibility issues with doc fix<commit_after>
|
from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [k for k in dir() if not k.startswith('_')]
del k
Fix python3 compatibility issues with doc fixfrom ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
<commit_before>from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [k for k in dir() if not k.startswith('_')]
del k
<commit_msg>Fix python3 compatibility issues with doc fix<commit_after>from ._library import *
from ._library import __version__, __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
e0cbe4bef0376a361ae931b82de3502b31227a54
|
examples/sponza/effect.py
|
examples/sponza/effect.py
|
import moderngl as mgl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(mgl.DEPTH_TEST)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
|
import moderngl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(moderngl.DEPTH_TEST)
self.ctx.enable(moderngl.CULL_FACE)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
|
Enable face culling in sponza example
|
Enable face culling in sponza example
|
Python
|
isc
|
Contraz/demosys-py
|
import moderngl as mgl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(mgl.DEPTH_TEST)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
Enable face culling in sponza example
|
import moderngl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(moderngl.DEPTH_TEST)
self.ctx.enable(moderngl.CULL_FACE)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
|
<commit_before>import moderngl as mgl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(mgl.DEPTH_TEST)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
<commit_msg>Enable face culling in sponza example<commit_after>
|
import moderngl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(moderngl.DEPTH_TEST)
self.ctx.enable(moderngl.CULL_FACE)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
|
import moderngl as mgl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(mgl.DEPTH_TEST)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
Enable face culling in sponza example
import moderngl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(moderngl.DEPTH_TEST)
self.ctx.enable(moderngl.CULL_FACE)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
|
<commit_before>import moderngl as mgl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(mgl.DEPTH_TEST)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
<commit_msg>Enable face culling in sponza example<commit_after>import moderngl
from demosys.effects import effect
class SceneEffect(effect.Effect):
"""Generated default effect"""
def __init__(self):
self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)
def draw(self, time, frametime, target):
self.ctx.enable(moderngl.DEPTH_TEST)
self.ctx.enable(moderngl.CULL_FACE)
self.sys_camera.velocity = self.scene.diagonal_size / 5.0
self.scene.draw(
projection_matrix=self.proj_mat,
camera_matrix=self.sys_camera.view_matrix,
time=time,
)
# Draw bbox
# self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
|
ee6941b15a66394d2683d5baeb0fe6ee61c2d0d3
|
freight/http.py
|
freight/http.py
|
from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{}'.format(freight.VERSION),
def build_session():
    """Return a new requests session tagged with the freight user agent."""
    sess = requests.Session()
    sess.headers.update({'User-Agent': USER_AGENT})
    return sess
def delete(*args, **kwargs):
    """Issue a DELETE request using a fresh tagged session."""
    return build_session().delete(*args, **kwargs)


def get(*args, **kwargs):
    """Issue a GET request using a fresh tagged session."""
    return build_session().get(*args, **kwargs)


def post(*args, **kwargs):
    """Issue a POST request using a fresh tagged session."""
    return build_session().post(*args, **kwargs)


def put(*args, **kwargs):
    """Issue a PUT request using a fresh tagged session."""
    return build_session().put(*args, **kwargs)
|
from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{version} (https://github.com/getsentry/freight)'.format(
version=freight.VERSION,
),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
|
Add url to user agent
|
Add url to user agent
|
Python
|
apache-2.0
|
klynton/freight,klynton/freight,rshk/freight,jkimbo/freight,rshk/freight,getsentry/freight,getsentry/freight,getsentry/freight,rshk/freight,jkimbo/freight,klynton/freight,jkimbo/freight,getsentry/freight,klynton/freight,jkimbo/freight,rshk/freight,getsentry/freight
|
from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{}'.format(freight.VERSION),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
Add url to user agent
|
from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{version} (https://github.com/getsentry/freight)'.format(
version=freight.VERSION,
),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
|
<commit_before>from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{}'.format(freight.VERSION),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
<commit_msg>Add url to user agent<commit_after>
|
from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{version} (https://github.com/getsentry/freight)'.format(
version=freight.VERSION,
),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
|
from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{}'.format(freight.VERSION),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
Add url to user agent
from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{version} (https://github.com/getsentry/freight)'.format(
version=freight.VERSION,
),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
|
<commit_before>from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{}'.format(freight.VERSION),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
<commit_msg>Add url to user agent<commit_after>from __future__ import absolute_import
__all__ = ['build_session', 'delete', 'get', 'post', 'put']
import freight
import requests
USER_AGENT = 'freight/{version} (https://github.com/getsentry/freight)'.format(
version=freight.VERSION,
),
def build_session():
session = requests.Session()
session.headers.update({'User-Agent': USER_AGENT})
return session
def delete(*args, **kwargs):
session = build_session()
return session.delete(*args, **kwargs)
def get(*args, **kwargs):
session = build_session()
return session.get(*args, **kwargs)
def post(*args, **kwargs):
session = build_session()
return session.post(*args, **kwargs)
def put(*args, **kwargs):
session = build_session()
return session.put(*args, **kwargs)
|
dbbf8a8de7a3212ac0c91a74a9fe5dd197272483
|
VOIAnalyzer.py
|
VOIAnalyzer.py
|
#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
|
#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
def _analysis(img_mat, voi_mat, voi_no, eps=1e-12):
""" Extract VOI statistices for each VOI.
"""
vec = img_mat[voi_mat == voi_no]
vec2 = vec[~np.isnan(vec)]
# Statistics
v_mean = float(vec2.mean())
v_sd = float(vec2.std(ddof=1))
v_cov = v_sd / (v_mean + eps) * 100.
v_max = float(vec2.max())
v_min = float(vec2.min())
n_vox = vec.size
# Output
out_tab = pd.DataFrame({"VOI No." : [voi_no],
"No. of voxels" : [n_vox],
"Mean" : [v_mean],
"SD" : [v_sd],
"CoV" : [v_cov],
"Max" : [v_max],
"Min" : [v_min]})
return out_tab
def voi_analysis(img_file, voi_file, lut_file=None):
    """Extract VOI statistics from an image.

    Inputs:
        img_file : Path for image to extract VOI values
        voi_file : Path for VOI map
        lut_file : Path for look-up table for VOI map.
                   If not None, look-up table is applied to output table.
    Output:
        out_tab : Pandas DataFrame of per-VOI statistics.
    """
    # Load image & VOI map.
    # NOTE(review): `utils` is not imported in this module's visible header;
    # confirm it is available at runtime.
    img_mat, img_aff = utils.loadImage(img_file)[:2]
    voi_mat = utils.loadImage(voi_file)[0].astype(np.int16)

    # One row of statistics per VOI number 1..max.
    maxNo = voi_mat.max()
    out_tab = pd.concat([_analysis(img_mat, voi_mat, v_no)
                         for v_no in range(1, maxNo + 1)])

    # Volume in cm3: voxel count * voxel volume (mm3) / 1000.
    # BUG FIX: vol_per_vox was previously computed but never used, so
    # "Volume" was just the voxel count / 1000 regardless of voxel size.
    vol_per_vox = np.abs(np.prod(np.diag(img_aff[:3, :3])))
    out_tab.loc[:, "Volume"] = out_tab.loc[:, "No. of voxels"] * vol_per_vox / 1000.

    # Map VOI numbers to names when a look-up table is supplied.
    if lut_file is not None:
        lut = utils.loadLUT(lut_file)
        out_tab.loc[:, "VOI"] = out_tab.loc[:, "VOI No."].map(lut)

    # Record which image produced these rows.
    out_tab.loc[:, "Path"] = img_file
    return out_tab
|
Implement basis for VOI analyzer.
|
Implement basis for VOI analyzer.
|
Python
|
mit
|
spikefairway/VOIAnalyzer
|
#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
Implement basis for VOI analyzer.
|
#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
def _analysis(img_mat, voi_mat, voi_no, eps=1e-12):
""" Extract VOI statistices for each VOI.
"""
vec = img_mat[voi_mat == voi_no]
vec2 = vec[~np.isnan(vec)]
# Statistics
v_mean = float(vec2.mean())
v_sd = float(vec2.std(ddof=1))
v_cov = v_sd / (v_mean + eps) * 100.
v_max = float(vec2.max())
v_min = float(vec2.min())
n_vox = vec.size
# Output
out_tab = pd.DataFrame({"VOI No." : [voi_no],
"No. of voxels" : [n_vox],
"Mean" : [v_mean],
"SD" : [v_sd],
"CoV" : [v_cov],
"Max" : [v_max],
"Min" : [v_min]})
return out_tab
def voi_analysis(img_file, voi_file, lut_file=None):
""" Extract VOI values.
It outputs Pandas DataFrame for VOI statistics.
Inputs:
img_file : Path for image to extract VOI values
voi_file : Path for VOI map
lut_file : Path for look-up table for VOI map.
If not None, look-up table is applied to output table.
Output:
out_tab : Pandas DataFrame for VOI statistics.
"""
# Load image & VOI
img_mat, img_aff = utils.loadImage(img_file)[:2]
voi_mat = utils.loadImage(voi_file)[0].astype(np.int16)
# Extract
maxNo = voi_mat.max()
out_tab = pd.concat([_analysis(img_mat, voi_mat, v_no)
for v_no in range(1, maxNo + 1, 1)])
# Calculate volumes (unit: cm3)
vol_per_vox = np.abs(np.prod(np.diag(img_aff[:3, :3])))
out_tab.loc[:, "Volume"] = out_tab.loc[:, "No. of voxels"] / 1000.
# Apply look-up table
if lut_file is not None:
lut = utils.loadLUT(lut_file)
out_tab.loc[:, "VOI"] = out_tab.loc[:, "VOI No."].map(lut)
# Image file name
out_tab.loc[:, "Path"] = img_file
return out_tab
|
<commit_before>#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
<commit_msg>Implement basis for VOI analyzer.<commit_after>
|
#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
def _analysis(img_mat, voi_mat, voi_no, eps=1e-12):
""" Extract VOI statistices for each VOI.
"""
vec = img_mat[voi_mat == voi_no]
vec2 = vec[~np.isnan(vec)]
# Statistics
v_mean = float(vec2.mean())
v_sd = float(vec2.std(ddof=1))
v_cov = v_sd / (v_mean + eps) * 100.
v_max = float(vec2.max())
v_min = float(vec2.min())
n_vox = vec.size
# Output
out_tab = pd.DataFrame({"VOI No." : [voi_no],
"No. of voxels" : [n_vox],
"Mean" : [v_mean],
"SD" : [v_sd],
"CoV" : [v_cov],
"Max" : [v_max],
"Min" : [v_min]})
return out_tab
def voi_analysis(img_file, voi_file, lut_file=None):
""" Extract VOI values.
It outputs Pandas DataFrame for VOI statistics.
Inputs:
img_file : Path for image to extract VOI values
voi_file : Path for VOI map
lut_file : Path for look-up table for VOI map.
If not None, look-up table is applied to output table.
Output:
out_tab : Pandas DataFrame for VOI statistics.
"""
# Load image & VOI
img_mat, img_aff = utils.loadImage(img_file)[:2]
voi_mat = utils.loadImage(voi_file)[0].astype(np.int16)
# Extract
maxNo = voi_mat.max()
out_tab = pd.concat([_analysis(img_mat, voi_mat, v_no)
for v_no in range(1, maxNo + 1, 1)])
# Calculate volumes (unit: cm3)
vol_per_vox = np.abs(np.prod(np.diag(img_aff[:3, :3])))
out_tab.loc[:, "Volume"] = out_tab.loc[:, "No. of voxels"] / 1000.
# Apply look-up table
if lut_file is not None:
lut = utils.loadLUT(lut_file)
out_tab.loc[:, "VOI"] = out_tab.loc[:, "VOI No."].map(lut)
# Image file name
out_tab.loc[:, "Path"] = img_file
return out_tab
|
#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
Implement basis for VOI analyzer.
#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
def _analysis(img_mat, voi_mat, voi_no, eps=1e-12):
""" Extract VOI statistices for each VOI.
"""
vec = img_mat[voi_mat == voi_no]
vec2 = vec[~np.isnan(vec)]
# Statistics
v_mean = float(vec2.mean())
v_sd = float(vec2.std(ddof=1))
v_cov = v_sd / (v_mean + eps) * 100.
v_max = float(vec2.max())
v_min = float(vec2.min())
n_vox = vec.size
# Output
out_tab = pd.DataFrame({"VOI No." : [voi_no],
"No. of voxels" : [n_vox],
"Mean" : [v_mean],
"SD" : [v_sd],
"CoV" : [v_cov],
"Max" : [v_max],
"Min" : [v_min]})
return out_tab
def voi_analysis(img_file, voi_file, lut_file=None):
""" Extract VOI values.
It outputs Pandas DataFrame for VOI statistics.
Inputs:
img_file : Path for image to extract VOI values
voi_file : Path for VOI map
lut_file : Path for look-up table for VOI map.
If not None, look-up table is applied to output table.
Output:
out_tab : Pandas DataFrame for VOI statistics.
"""
# Load image & VOI
img_mat, img_aff = utils.loadImage(img_file)[:2]
voi_mat = utils.loadImage(voi_file)[0].astype(np.int16)
# Extract
maxNo = voi_mat.max()
out_tab = pd.concat([_analysis(img_mat, voi_mat, v_no)
for v_no in range(1, maxNo + 1, 1)])
# Calculate volumes (unit: cm3)
vol_per_vox = np.abs(np.prod(np.diag(img_aff[:3, :3])))
out_tab.loc[:, "Volume"] = out_tab.loc[:, "No. of voxels"] / 1000.
# Apply look-up table
if lut_file is not None:
lut = utils.loadLUT(lut_file)
out_tab.loc[:, "VOI"] = out_tab.loc[:, "VOI No."].map(lut)
# Image file name
out_tab.loc[:, "Path"] = img_file
return out_tab
|
<commit_before>#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
<commit_msg>Implement basis for VOI analyzer.<commit_after>#!/usr/bin/env python
# coding : utf-8
"""
Main module for VOI analyzer.
"""
import pandas as pd
import numpy as np
import argparse
import os
def _analysis(img_mat, voi_mat, voi_no, eps=1e-12):
""" Extract VOI statistices for each VOI.
"""
vec = img_mat[voi_mat == voi_no]
vec2 = vec[~np.isnan(vec)]
# Statistics
v_mean = float(vec2.mean())
v_sd = float(vec2.std(ddof=1))
v_cov = v_sd / (v_mean + eps) * 100.
v_max = float(vec2.max())
v_min = float(vec2.min())
n_vox = vec.size
# Output
out_tab = pd.DataFrame({"VOI No." : [voi_no],
"No. of voxels" : [n_vox],
"Mean" : [v_mean],
"SD" : [v_sd],
"CoV" : [v_cov],
"Max" : [v_max],
"Min" : [v_min]})
return out_tab
def voi_analysis(img_file, voi_file, lut_file=None):
""" Extract VOI values.
It outputs Pandas DataFrame for VOI statistics.
Inputs:
img_file : Path for image to extract VOI values
voi_file : Path for VOI map
lut_file : Path for look-up table for VOI map.
If not None, look-up table is applied to output table.
Output:
out_tab : Pandas DataFrame for VOI statistics.
"""
# Load image & VOI
img_mat, img_aff = utils.loadImage(img_file)[:2]
voi_mat = utils.loadImage(voi_file)[0].astype(np.int16)
# Extract
maxNo = voi_mat.max()
out_tab = pd.concat([_analysis(img_mat, voi_mat, v_no)
for v_no in range(1, maxNo + 1, 1)])
# Calculate volumes (unit: cm3)
vol_per_vox = np.abs(np.prod(np.diag(img_aff[:3, :3])))
out_tab.loc[:, "Volume"] = out_tab.loc[:, "No. of voxels"] / 1000.
# Apply look-up table
if lut_file is not None:
lut = utils.loadLUT(lut_file)
out_tab.loc[:, "VOI"] = out_tab.loc[:, "VOI No."].map(lut)
# Image file name
out_tab.loc[:, "Path"] = img_file
return out_tab
|
0853d74c1ee15b28f308cba6c4145741c7937f50
|
vcli/verror.py
|
vcli/verror.py
|
import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')


def format_error(error):
    """Format a database error into a concise message, appending the SQL
    with a caret marking the error position when the error carries one."""
    msg = str(error)
    # Errors without one_line_sql() carry no query context; pass through.
    if not hasattr(error, 'one_line_sql'):
        return msg

    parts = []
    sqlstate = RE_SQLSTATE.search(msg)
    if sqlstate:
        parts.append('ERROR %s: ' % sqlstate.group(1))
    message = RE_MESSAGE.search(msg)
    if message:
        parts.append(message.group(1))
    position = RE_POSITION.search(msg)
    if position:
        sql = error.one_line_sql()
        col = int(position.group(1))
        parts.append(('\n%s\n' % sql) + (' ' * (col - 1)) + '^')
    return ''.join(parts)
|
import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')


def format_error(error):
    """Format a database error into a concise message.

    When the error carries a query (``one_line_sql``) and a position, the
    SQL is appended with a caret under the offending character.  Long
    queries are truncated to a 75-character window around the position,
    with '...' marking trimmed ends.
    """
    msg = str(error)
    # Errors without one_line_sql() carry no query context; pass through.
    if not hasattr(error, 'one_line_sql'):
        return msg
    result = ''
    match = RE_SQLSTATE.search(msg)
    if match:
        result += 'ERROR %s: ' % match.group(1)
    match = RE_MESSAGE.search(msg)
    if match:
        result += match.group(1)
    match = RE_POSITION.search(msg)
    if match:
        sql = error.one_line_sql()
        position = int(match.group(1))
        # Truncate the SQL query if its length > n (n must be odd)
        n = 75
        # BUG FIX: use floor division.  True division yields a float on
        # Python 3, and float slice indices below raise TypeError.
        # (// is behavior-identical on Python 2 ints.)
        d = (n - 1) // 2
        length = len(sql)
        if length > n:
            left = position - d
            right = position + d
            position = d
            head = '...'
            tail = '...'
            if left < 0:
                # Window clipped at the start: shift right, drop the head mark.
                right -= left
                position += left
                left = 0
                head = ''
            elif right >= length:
                # Window clipped at the end: shift left, drop the tail mark.
                offset = right - length + 1
                left -= offset
                position += offset
                right = length - 1
                tail = ''
            sql = head + sql[left:right + 1] + tail
            position += len(head)
        result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
    return result
|
Format error message better in a long SQL
|
Format error message better in a long SQL
|
Python
|
bsd-3-clause
|
dbcli/vcli,dbcli/vcli
|
import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')
def format_error(error):
msg = str(error)
if not hasattr(error, 'one_line_sql'):
return msg
result = ''
match = RE_SQLSTATE.search(msg)
if match:
result += 'ERROR %s: ' % match.group(1)
match = RE_MESSAGE.search(msg)
if match:
result += match.group(1)
match = RE_POSITION.search(msg)
if match:
sql = error.one_line_sql()
position = int(match.group(1))
result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
return result
Format error message better in a long SQL
|
import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')
def format_error(error):
msg = str(error)
if not hasattr(error, 'one_line_sql'):
return msg
result = ''
match = RE_SQLSTATE.search(msg)
if match:
result += 'ERROR %s: ' % match.group(1)
match = RE_MESSAGE.search(msg)
if match:
result += match.group(1)
match = RE_POSITION.search(msg)
if match:
sql = error.one_line_sql()
position = int(match.group(1))
# Truncate the SQL query if its length > n (n must be odd)
n = 75
d = (n - 1) / 2
length = len(sql)
if length > n:
left = position - d
right = position + d
position = d
head = '...'
tail = '...'
if left < 0:
right -= left
position += left
left = 0
head = ''
elif right >= length:
offset = right - length + 1
left -= offset
position += offset
right = length - 1
tail = ''
sql = head + sql[left:right + 1] + tail
position += len(head)
result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
return result
|
<commit_before>import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')
def format_error(error):
msg = str(error)
if not hasattr(error, 'one_line_sql'):
return msg
result = ''
match = RE_SQLSTATE.search(msg)
if match:
result += 'ERROR %s: ' % match.group(1)
match = RE_MESSAGE.search(msg)
if match:
result += match.group(1)
match = RE_POSITION.search(msg)
if match:
sql = error.one_line_sql()
position = int(match.group(1))
result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
return result
<commit_msg>Format error message better in a long SQL<commit_after>
|
import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')
def format_error(error):
msg = str(error)
if not hasattr(error, 'one_line_sql'):
return msg
result = ''
match = RE_SQLSTATE.search(msg)
if match:
result += 'ERROR %s: ' % match.group(1)
match = RE_MESSAGE.search(msg)
if match:
result += match.group(1)
match = RE_POSITION.search(msg)
if match:
sql = error.one_line_sql()
position = int(match.group(1))
# Truncate the SQL query if its length > n (n must be odd)
n = 75
d = (n - 1) / 2
length = len(sql)
if length > n:
left = position - d
right = position + d
position = d
head = '...'
tail = '...'
if left < 0:
right -= left
position += left
left = 0
head = ''
elif right >= length:
offset = right - length + 1
left -= offset
position += offset
right = length - 1
tail = ''
sql = head + sql[left:right + 1] + tail
position += len(head)
result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
return result
|
import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')
def format_error(error):
msg = str(error)
if not hasattr(error, 'one_line_sql'):
return msg
result = ''
match = RE_SQLSTATE.search(msg)
if match:
result += 'ERROR %s: ' % match.group(1)
match = RE_MESSAGE.search(msg)
if match:
result += match.group(1)
match = RE_POSITION.search(msg)
if match:
sql = error.one_line_sql()
position = int(match.group(1))
result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
return result
Format error message better in a long SQL
import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')
def format_error(error):
msg = str(error)
if not hasattr(error, 'one_line_sql'):
return msg
result = ''
match = RE_SQLSTATE.search(msg)
if match:
result += 'ERROR %s: ' % match.group(1)
match = RE_MESSAGE.search(msg)
if match:
result += match.group(1)
match = RE_POSITION.search(msg)
if match:
sql = error.one_line_sql()
position = int(match.group(1))
# Truncate the SQL query if its length > n (n must be odd)
n = 75
d = (n - 1) / 2
length = len(sql)
if length > n:
left = position - d
right = position + d
position = d
head = '...'
tail = '...'
if left < 0:
right -= left
position += left
left = 0
head = ''
elif right >= length:
offset = right - length + 1
left -= offset
position += offset
right = length - 1
tail = ''
sql = head + sql[left:right + 1] + tail
position += len(head)
result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
return result
|
<commit_before>import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')
def format_error(error):
msg = str(error)
if not hasattr(error, 'one_line_sql'):
return msg
result = ''
match = RE_SQLSTATE.search(msg)
if match:
result += 'ERROR %s: ' % match.group(1)
match = RE_MESSAGE.search(msg)
if match:
result += match.group(1)
match = RE_POSITION.search(msg)
if match:
sql = error.one_line_sql()
position = int(match.group(1))
result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
return result
<commit_msg>Format error message better in a long SQL<commit_after>import re
RE_MESSAGE = re.compile(r'Message: (.+), Sqlstate:')
RE_SQLSTATE = re.compile(r'Sqlstate: (\d+)')
RE_POSITION = re.compile(r'Position: (\d+)')
def format_error(error):
msg = str(error)
if not hasattr(error, 'one_line_sql'):
return msg
result = ''
match = RE_SQLSTATE.search(msg)
if match:
result += 'ERROR %s: ' % match.group(1)
match = RE_MESSAGE.search(msg)
if match:
result += match.group(1)
match = RE_POSITION.search(msg)
if match:
sql = error.one_line_sql()
position = int(match.group(1))
# Truncate the SQL query if its length > n (n must be odd)
n = 75
d = (n - 1) / 2
length = len(sql)
if length > n:
left = position - d
right = position + d
position = d
head = '...'
tail = '...'
if left < 0:
right -= left
position += left
left = 0
head = ''
elif right >= length:
offset = right - length + 1
left -= offset
position += offset
right = length - 1
tail = ''
sql = head + sql[left:right + 1] + tail
position += len(head)
result += ('\n%s\n' % sql) + (' ' * (position - 1)) + '^'
return result
|
7821db4fb30bc013f8ae71c779faae5f6864da1d
|
falafel/__init__.py
|
falafel/__init__.py
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
# Absolute path of the installed package directory; used to locate the
# RELEASE and COMMIT files shipped alongside the package.
__here__ = os.path.dirname(os.path.abspath(__file__))

VERSION = "1.9.0"
NAME = "falafel"

# RELEASE and COMMIT are build-time generated files.
with open(os.path.join(__here__, "RELEASE")) as release_file:
    RELEASE = release_file.read().strip()

with open(os.path.join(__here__, "COMMIT")) as commit_file:
    COMMIT = commit_file.read().strip()


def get_nvr():
    """Return the name-version-release string, e.g. "falafel-1.9.0-1"."""
    return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
# Registry of version info for each rule repo included in this instance:
#   {"rule_repo_1": {"version": nvr(), "commit": sha1}}
RULES_STATUS = {}


def add_status(name, nvr, commit):
    """Register a rule repository's version information.

    Rule repositories should call this from their package __init__.
    """
    RULES_STATUS[name] = {"version": nvr, "commit": commit}
|
Allow rule repos to provide version information
|
Allow rule repos to provide version information
Added a new method to the root package `add_status`.
Rule repos should use it during initialization:
import falafel
falafel.add_status(name="my_rule_repo", nvr="my-rules-1.0.0-1", commit="abcdef")
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
Allow rule repos to provide version information
Added a new method to the root package `add_status`.
Rule repos should use it during initialization:
import falafel
falafel.add_status(name="my_rule_repo", nvr="my-rules-1.0.0-1", commit="abcdef")
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
RULES_STATUS = {}
"""
Mapping of dictionaries containing nvr and commitid for each rule repo included
in this instance
{"rule_repo_1": {"version": nvr(), "commit": sha1}}
"""
def add_status(name, nvr, commit):
"""
Rule repositories should call this method in their package __init__ to
register their version information.
"""
RULES_STATUS[name] = {"version": nvr, "commit": commit}
|
<commit_before>import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
<commit_msg>Allow rule repos to provide version information
Added a new method to the root package `add_status`.
Rule repos should use it during initialization:
import falafel
falafel.add_status(name="my_rule_repo", nvr="my-rules-1.0.0-1", commit="abcdef")<commit_after>
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
RULES_STATUS = {}
"""
Mapping of dictionaries containing nvr and commitid for each rule repo included
in this instance
{"rule_repo_1": {"version": nvr(), "commit": sha1}}
"""
def add_status(name, nvr, commit):
"""
Rule repositories should call this method in their package __init__ to
register their version information.
"""
RULES_STATUS[name] = {"version": nvr, "commit": commit}
|
import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
Allow rule repos to provide version information
Added a new method to the root package `add_status`.
Rule repos should use it during initialization:
import falafel
falafel.add_status(name="my_rule_repo", nvr="my-rules-1.0.0-1", commit="abcdef")import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
RULES_STATUS = {}
"""
Mapping of dictionaries containing nvr and commitid for each rule repo included
in this instance
{"rule_repo_1": {"version": nvr(), "commit": sha1}}
"""
def add_status(name, nvr, commit):
"""
Rule repositories should call this method in their package __init__ to
register their version information.
"""
RULES_STATUS[name] = {"version": nvr, "commit": commit}
|
<commit_before>import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
<commit_msg>Allow rule repos to provide version information
Added a new method to the root package `add_status`.
Rule repos should use it during initialization:
import falafel
falafel.add_status(name="my_rule_repo", nvr="my-rules-1.0.0-1", commit="abcdef")<commit_after>import os
from .core import LogFileOutput, MapperOutput, computed # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.9.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
RULES_STATUS = {}
"""
Mapping of dictionaries containing nvr and commitid for each rule repo included
in this instance
{"rule_repo_1": {"version": nvr(), "commit": sha1}}
"""
def add_status(name, nvr, commit):
"""
Rule repositories should call this method in their package __init__ to
register their version information.
"""
RULES_STATUS[name] = {"version": nvr, "commit": commit}
|
9e87598ddaa19b565099685c2bdad636b29b3d4f
|
frappe/tests/test_boot.py
|
frappe/tests/test_boot.py
|
import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
class TestBootData(unittest.TestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
|
import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
from frappe.tests.utils import FrappeTestCase
class TestBootData(FrappeTestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
|
Use FrappeTestCase as it rolls back test data
|
refactor: Use FrappeTestCase as it rolls back test data
|
Python
|
mit
|
frappe/frappe,StrellaGroup/frappe,frappe/frappe,StrellaGroup/frappe,StrellaGroup/frappe,frappe/frappe
|
import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
class TestBootData(unittest.TestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
refactor: Use FrappeTestCase as it rolls back test data
|
import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
from frappe.tests.utils import FrappeTestCase
class TestBootData(FrappeTestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
|
<commit_before>import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
class TestBootData(unittest.TestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
<commit_msg>refactor: Use FrappeTestCase as it rolls back test data<commit_after>
|
import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
from frappe.tests.utils import FrappeTestCase
class TestBootData(FrappeTestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
|
import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
class TestBootData(unittest.TestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
refactor: Use FrappeTestCase as it rolls back test dataimport unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
from frappe.tests.utils import FrappeTestCase
class TestBootData(FrappeTestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
|
<commit_before>import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
class TestBootData(unittest.TestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
<commit_msg>refactor: Use FrappeTestCase as it rolls back test data<commit_after>import unittest
import frappe
from frappe.boot import get_unseen_notes
from frappe.desk.doctype.note.note import mark_as_seen
from frappe.tests.utils import FrappeTestCase
class TestBootData(FrappeTestCase):
def test_get_unseen_notes(self):
frappe.db.delete("Note")
frappe.db.delete("Note Seen By")
note = frappe.get_doc(
{
"doctype": "Note",
"title": "Test Note",
"notify_on_login": 1,
"content": "Test Note 1",
"public": 1,
}
)
note.insert()
frappe.set_user("test@example.com")
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, ["Test Note"])
mark_as_seen(note.name)
unseen_notes = [d.title for d in get_unseen_notes()]
self.assertListEqual(unseen_notes, [])
|
a5ddab3208992ca6ab655ddef9a4155d5fc6bc55
|
tests/grammar_test.py
|
tests/grammar_test.py
|
import nose
from parser_tool import get_parser, parse
sentences = (
# N[s] V[i]
"Brad drives",
# N[s] V[t] N[p]
"Angela drives cars",
# N[s] V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks"
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
|
import nose
from parser_tool import get_parser, parse
sentences = (
# PN V[i]
"Brad drives",
# PN V[t] N[p]
"Angela drives cars",
# PN V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks",
# Det[p] N[p] V[i]
"these dogs walk",
# Det[p] N[p] V[t] Det N[s]
"the cars enter the house",
# A N[p] V[t] Det N[s]
"red cars enter the house",
# Det A N[s] V[t] Det N[s]
"a red car enters the house",
# PN V[t] Det A N[s]
"Brad buys a red car",
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
|
Increase testing coverage of grammar
|
Increase testing coverage of grammar
* added some sample sentences with ajectives
|
Python
|
mit
|
caninemwenja/marker,kmwenja/marker
|
import nose
from parser_tool import get_parser, parse
sentences = (
# N[s] V[i]
"Brad drives",
# N[s] V[t] N[p]
"Angela drives cars",
# N[s] V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks"
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
Increase testing coverage of grammar
* added some sample sentences with ajectives
|
import nose
from parser_tool import get_parser, parse
sentences = (
# PN V[i]
"Brad drives",
# PN V[t] N[p]
"Angela drives cars",
# PN V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks",
# Det[p] N[p] V[i]
"these dogs walk",
# Det[p] N[p] V[t] Det N[s]
"the cars enter the house",
# A N[p] V[t] Det N[s]
"red cars enter the house",
# Det A N[s] V[t] Det N[s]
"a red car enters the house",
# PN V[t] Det A N[s]
"Brad buys a red car",
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
|
<commit_before>import nose
from parser_tool import get_parser, parse
sentences = (
# N[s] V[i]
"Brad drives",
# N[s] V[t] N[p]
"Angela drives cars",
# N[s] V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks"
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
<commit_msg>Increase testing coverage of grammar
* added some sample sentences with ajectives<commit_after>
|
import nose
from parser_tool import get_parser, parse
sentences = (
# PN V[i]
"Brad drives",
# PN V[t] N[p]
"Angela drives cars",
# PN V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks",
# Det[p] N[p] V[i]
"these dogs walk",
# Det[p] N[p] V[t] Det N[s]
"the cars enter the house",
# A N[p] V[t] Det N[s]
"red cars enter the house",
# Det A N[s] V[t] Det N[s]
"a red car enters the house",
# PN V[t] Det A N[s]
"Brad buys a red car",
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
|
import nose
from parser_tool import get_parser, parse
sentences = (
# N[s] V[i]
"Brad drives",
# N[s] V[t] N[p]
"Angela drives cars",
# N[s] V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks"
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
Increase testing coverage of grammar
* added some sample sentences with ajectivesimport nose
from parser_tool import get_parser, parse
sentences = (
# PN V[i]
"Brad drives",
# PN V[t] N[p]
"Angela drives cars",
# PN V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks",
# Det[p] N[p] V[i]
"these dogs walk",
# Det[p] N[p] V[t] Det N[s]
"the cars enter the house",
# A N[p] V[t] Det N[s]
"red cars enter the house",
# Det A N[s] V[t] Det N[s]
"a red car enters the house",
# PN V[t] Det A N[s]
"Brad buys a red car",
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
|
<commit_before>import nose
from parser_tool import get_parser, parse
sentences = (
# N[s] V[i]
"Brad drives",
# N[s] V[t] N[p]
"Angela drives cars",
# N[s] V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks"
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
<commit_msg>Increase testing coverage of grammar
* added some sample sentences with ajectives<commit_after>import nose
from parser_tool import get_parser, parse
sentences = (
# PN V[i]
"Brad drives",
# PN V[t] N[p]
"Angela drives cars",
# PN V[t] Det N[s]
"Brad buys the house",
# Det[s] N[s] V[i]
"a dog walks",
# Det[p] N[p] V[i]
"these dogs walk",
# Det[p] N[p] V[t] Det N[s]
"the cars enter the house",
# A N[p] V[t] Det N[s]
"red cars enter the house",
# Det A N[s] V[t] Det N[s]
"a red car enters the house",
# PN V[t] Det A N[s]
"Brad buys a red car",
)
grammar = get_parser("grammars/feat1.fcfg", trace=0)
def test_grammar():
global sentences, parser
for sent in sentences:
print "Testing: %s" % sent
trees = parse(grammar, sent)
assert len(trees) > 0
if __name__=="__main__":
nose.main()
|
fff0087f82c3f79d5e60e32071a4e89478d8b85e
|
tests/test_element.py
|
tests/test_element.py
|
import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0, pv=PV)
result = e.get_pv('x')
assert isinstance(result, float)
|
import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0)
e.set_pv('x', PV)
result = e.get_pv('x')
assert isinstance(result, float)
with pytest.raises(rml.ConfigException):
e.get_pv('y')
|
Make pvs in Element behave more realistically.
|
Make pvs in Element behave more realistically.
|
Python
|
apache-2.0
|
razvanvasile/RML,willrogers/pml,willrogers/pml
|
import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0, pv=PV)
result = e.get_pv('x')
assert isinstance(result, float)
Make pvs in Element behave more realistically.
|
import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0)
e.set_pv('x', PV)
result = e.get_pv('x')
assert isinstance(result, float)
with pytest.raises(rml.ConfigException):
e.get_pv('y')
|
<commit_before>import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0, pv=PV)
result = e.get_pv('x')
assert isinstance(result, float)
<commit_msg>Make pvs in Element behave more realistically.<commit_after>
|
import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0)
e.set_pv('x', PV)
result = e.get_pv('x')
assert isinstance(result, float)
with pytest.raises(rml.ConfigException):
e.get_pv('y')
|
import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0, pv=PV)
result = e.get_pv('x')
assert isinstance(result, float)
Make pvs in Element behave more realistically.import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0)
e.set_pv('x', PV)
result = e.get_pv('x')
assert isinstance(result, float)
with pytest.raises(rml.ConfigException):
e.get_pv('y')
|
<commit_before>import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0, pv=PV)
result = e.get_pv('x')
assert isinstance(result, float)
<commit_msg>Make pvs in Element behave more realistically.<commit_after>import pkg_resources
pkg_resources.require('cothread')
import cothread
import rml
import rml.element
def test_create_element():
e = rml.element.Element('BPM', 6.0)
assert e.get_type() == 'BPM'
assert e.get_length() == 6.0
def test_add_element_to_family():
e = rml.element.Element('dummy', 0.0)
e.add_to_family('fam')
assert 'fam' in e.get_families()
def test_get_pv_value():
PV = 'SR22C-DI-EBPM-04:SA:X'
e = rml.element.Element('dummy', 0.0)
e.set_pv('x', PV)
result = e.get_pv('x')
assert isinstance(result, float)
with pytest.raises(rml.ConfigException):
e.get_pv('y')
|
9075603da0ac5993836001749c7999dec6498f95
|
tests/test_process.py
|
tests/test_process.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_duration(self):
time = 2
p = execute('sleep %s' % time)
(_, perf) = monitor(p)
duration = perf['duration']
self.assertAlmostEqual(duration, time, delta=1.0)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
Remove unreproducible test (sleep is too inaccurate)
|
Remove unreproducible test
(sleep is too inaccurate)
|
Python
|
mit
|
davidchall/nrtest
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_duration(self):
time = 2
p = execute('sleep %s' % time)
(_, perf) = monitor(p)
duration = perf['duration']
self.assertAlmostEqual(duration, time, delta=1.0)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
Remove unreproducible test
(sleep is too inaccurate)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_duration(self):
time = 2
p = execute('sleep %s' % time)
(_, perf) = monitor(p)
duration = perf['duration']
self.assertAlmostEqual(duration, time, delta=1.0)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
<commit_msg>Remove unreproducible test
(sleep is too inaccurate)<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_duration(self):
time = 2
p = execute('sleep %s' % time)
(_, perf) = monitor(p)
duration = perf['duration']
self.assertAlmostEqual(duration, time, delta=1.0)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
Remove unreproducible test
(sleep is too inaccurate)#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_duration(self):
time = 2
p = execute('sleep %s' % time)
(_, perf) = monitor(p)
duration = perf['duration']
self.assertAlmostEqual(duration, time, delta=1.0)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
<commit_msg>Remove unreproducible test
(sleep is too inaccurate)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_process
----------------------------------
Tests for process management.
"""
# system imports
import unittest
import os
from subprocess import check_output
from tempfile import NamedTemporaryFile
# project imports
from nrtest.process import source, execute, monitor
class TestProcess(unittest.TestCase):
def test_source(self):
var_name, var_value = 'TESTVAR', 'This is a test'
with NamedTemporaryFile('w', delete=False) as f:
f.write('export %s="%s"' % (var_name, var_value))
script_name = f.name
env = source(script_name)
cmd = ['/bin/bash', '-c', 'echo $%s' % var_name]
stdout = check_output(cmd, env=env, universal_newlines=True)
os.remove(script_name)
self.assertEqual(stdout.strip(), var_value)
def test_timeout(self):
p = execute('sleep 5')
(_, perf) = monitor(p, timeout=2)
duration = perf['duration']
self.assertIsNone(duration)
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
34c0a728add7715a9420537f57f7c1a69176c57d
|
tests/serializer/abstract_test.py
|
tests/serializer/abstract_test.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer import Base
class BaseSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Base(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Base().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Base(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Base().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().loads, 1))
if "__main__" == __name__:
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer.abstract import Abstract
class AbstractSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Abstract(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Abstract(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))
if "__main__" == __name__:
unittest.main()
|
Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class
|
Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class
|
Python
|
bsd-3-clause
|
michalbachowski/pygrapes,michalbachowski/pygrapes,michalbachowski/pygrapes
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer import Base
class BaseSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Base(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Base().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Base(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Base().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().loads, 1))
if "__main__" == __name__:
unittest.main()
Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer.abstract import Abstract
class AbstractSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Abstract(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Abstract(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))
if "__main__" == __name__:
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer import Base
class BaseSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Base(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Base().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Base(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Base().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().loads, 1))
if "__main__" == __name__:
unittest.main()
<commit_msg>Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer.abstract import Abstract
class AbstractSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Abstract(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Abstract(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))
if "__main__" == __name__:
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer import Base
class BaseSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Base(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Base().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Base(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Base().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().loads, 1))
if "__main__" == __name__:
unittest.main()
Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer.abstract import Abstract
class AbstractSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Abstract(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Abstract(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))
if "__main__" == __name__:
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer import Base
class BaseSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Base(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Base().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Base(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Base().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Base().loads, 1))
if "__main__" == __name__:
unittest.main()
<commit_msg>Use abstract.Abstract instead of Base alias when testing pygrapes.serializer.abstract.Abstract class<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# hack for loading modules
#
import _path
_path.fix()
##
# python standard library
#
from functools import partial
import unittest
##
# pygrapes modules
#
from pygrapes.serializer.abstract import Abstract
class AbstractSerializerTestCase(unittest.TestCase):
def test_method_dumps_exists(self):
self.assertTrue(hasattr(Abstract(), 'dumps'))
def test_method_dumps_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().dumps)
def test_dumps_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().dumps, 1))
def test_method_loads_exists(self):
self.assertTrue(hasattr(Abstract(), 'loads'))
def test_method_loads_expects_one_arg(self):
self.assertRaises(TypeError, Abstract().loads)
def test_loads_method_must_be_implemented(self):
self.assertRaises(NotImplementedError, partial(Abstract().loads, 1))
if "__main__" == __name__:
unittest.main()
|
c174e7bffbfbbb2fe4a1d13d37d9c056d44d95d2
|
project/apps.py
|
project/apps.py
|
#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
foo
self.render('templates/apps/home.html', title='Hello, World!');
|
#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
self.render('templates/apps/home.html', title='Hello, World!');
|
Remove the errror intentionally left for debugging
|
Remove the errror intentionally left for debugging
|
Python
|
bsd-3-clause
|
lucius-feng/tinman,gmr/tinman,lucius-feng/tinman,lucius-feng/tinman,gmr/tinman
|
#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
foo
self.render('templates/apps/home.html', title='Hello, World!');Remove the errror intentionally left for debugging
|
#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
self.render('templates/apps/home.html', title='Hello, World!');
|
<commit_before>#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
foo
self.render('templates/apps/home.html', title='Hello, World!');<commit_msg>Remove the errror intentionally left for debugging<commit_after>
|
#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
self.render('templates/apps/home.html', title='Hello, World!');
|
#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
foo
self.render('templates/apps/home.html', title='Hello, World!');Remove the errror intentionally left for debugging#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
self.render('templates/apps/home.html', title='Hello, World!');
|
<commit_before>#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
foo
self.render('templates/apps/home.html', title='Hello, World!');<commit_msg>Remove the errror intentionally left for debugging<commit_after>#!/usr/bin/env python
"""
Project Core Application Classes
"""
__author__ = "Gavin M. Roy"
__email__ = "gavinmroy@gmail.com"
__date__ = "2009-11-10"
__version__ = 0.1
import project.data
import project.handler
class Home(project.handler.RequestHandler):
def get(self):
self.render('templates/apps/home.html', title='Hello, World!');
|
d6b2c3fcae81ca30d406778f66c6f8b12cfb04d8
|
tests/window/WINDOW_CAPTION.py
|
tests/window/WINDOW_CAPTION.py
|
#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
Make windows bigger in this test so the captions can be read.
|
Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
|
Python
|
bsd-3-clause
|
adamlwgriffiths/Pyglet,niklaskorz/pyglet,niklaskorz/pyglet,adamlwgriffiths/Pyglet,adamlwgriffiths/Pyglet,adamlwgriffiths/Pyglet,seeminglee/pyglet64,niklaskorz/pyglet,seeminglee/pyglet64,niklaskorz/pyglet,seeminglee/pyglet64
|
#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
|
#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
<commit_msg>Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')<commit_after>
|
#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
<commit_msg>Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')<commit_after>#!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
|
4070507e3357d36f2412cc5c68a63780ae1b814d
|
glance_api_local_check.py
|
glance_api_local_check.py
|
#!/usr/bin/env python
from maas_common import (get_auth_ref, get_glance_client, status_err,
status_ok, metric)
import sys
IMAGE_ENDPOINT = 'http://127.0.0.1:9292'
def check(token):
glance = get_glance_client(token, IMAGE_ENDPOINT)
if glance is None:
status_err('Unable to obtain valid glance client, cannot proceed')
status_ok()
metric('glance_api_local_status', 'uint32', 1)
def main():
auth_ref = get_auth_ref()
token = auth_ref['token']['id']
check(token)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from maas_common import (status_ok, status_err, metric, get_keystone_client,
get_auth_ref)
from requests import Session
from requests import exceptions as exc
def check(auth_ref):
keystone = get_keystone_client(auth_ref)
tenant_id = keystone.tenant_id
auth_token = keystone.auth_token
registry_endpoint = 'http://127.0.0.1:9292/v2'
api-status = 1
milliseconds = 0
s = Session()
s.headers.update(
{'Content-type': 'application/json',
'x-auth-token': auth_token})
try:
# Hit something that isn't querying the glance-registry, since we
# query glance-registry in separate checks
r = s.get('%s/schemas/image' % registry_endpoint, verify=False,
timeout=10)
except (exc.ConnectionError, exc.HTTPError, exc.Timeout):
api_status = 0
milliseconds = -1
except Exception as e:
status_err(str(e))
else:
milliseconds = r.elapsed.total_seconds() * 1000
if not r.ok:
api_status = 0
status_ok()
metric('glance_registry_local_status', 'uint32', api_status)
metric('glance_registry_local_response_time', 'int32', milliseconds)
def main():
auth_ref = get_auth_ref()
check(auth_ref)
if __name__ == "__main__":
main()
|
Make a direct call to glance-api using requests
|
Make a direct call to glance-api using requests
This change makes this check no longer use the glanceclient tool so
we can craft a request that doesn't hit the glance-registry. The
reason for this is that the glance-registry itself is tested in a
different check and therefore we just need to ensure the glance-api
itself is responding.
|
Python
|
apache-2.0
|
cfarquhar/rpc-openstack,stevelle/rpc-openstack,robb-romans/rpc-openstack,byronmccollum/rpc-openstack,cloudnull/rpc-maas,byronmccollum/rpc-openstack,mattt416/rpc-openstack,nrb/rpc-openstack,hughsaunders/rpc-openstack,darrenchan/rpc-openstack,byronmccollum/rpc-openstack,jacobwagner/rpc-openstack,jpmontez/rpc-openstack,briancurtin/rpc-maas,sigmavirus24/rpc-openstack,darrenchan/rpc-openstack,rcbops/rpc-openstack,BjoernT/rpc-openstack,prometheanfire/rpc-openstack,nrb/rpc-openstack,briancurtin/rpc-maas,cloudnull/rpc-maas,xeregin/rpc-openstack,busterswt/rpc-openstack,git-harry/rpc-openstack,major/rpc-openstack,stevelle/rpc-openstack,miguelgrinberg/rpc-openstack,darrenchan/rpc-openstack,mattt416/rpc-openstack,cloudnull/rpc-openstack,robb-romans/rpc-openstack,cfarquhar/rpc-openstack,andymcc/rpc-openstack,jpmontez/rpc-openstack,darrenchan/rpc-openstack,xeregin/rpc-openstack,sigmavirus24/rpc-openstack,claco/rpc-openstack,galstrom21/rpc-openstack,cfarquhar/rpc-maas,mancdaz/rpc-openstack,jpmontez/rpc-openstack,git-harry/rpc-openstack,npawelek/rpc-maas,shannonmitchell/rpc-openstack,briancurtin/rpc-maas,claco/rpc-openstack,prometheanfire/rpc-openstack,BjoernT/rpc-openstack,busterswt/rpc-openstack,cloudnull/rpc-maas,jacobwagner/rpc-openstack,stevelle/rpc-openstack,sigmavirus24/rpc-openstack,mattt416/rpc-openstack,claco/rpc-openstack,cfarquhar/rpc-maas,npawelek/rpc-maas,andymcc/rpc-openstack,nrb/rpc-openstack,rcbops/rpc-openstack,miguelgrinberg/rpc-openstack,xeregin/rpc-openstack,galstrom21/rpc-openstack,cloudnull/rpc-openstack,miguelgrinberg/rpc-openstack,sigmavirus24/rpc-openstack,xeregin/rpc-openstack,shannonmitchell/rpc-openstack,major/rpc-openstack,npawelek/rpc-maas,busterswt/rpc-openstack,andymcc/rpc-openstack,mancdaz/rpc-openstack,hughsaunders/rpc-openstack,cfarquhar/rpc-maas
|
#!/usr/bin/env python
from maas_common import (get_auth_ref, get_glance_client, status_err,
status_ok, metric)
import sys
IMAGE_ENDPOINT = 'http://127.0.0.1:9292'
def check(token):
glance = get_glance_client(token, IMAGE_ENDPOINT)
if glance is None:
status_err('Unable to obtain valid glance client, cannot proceed')
status_ok()
metric('glance_api_local_status', 'uint32', 1)
def main():
auth_ref = get_auth_ref()
token = auth_ref['token']['id']
check(token)
if __name__ == "__main__":
main()
Make a direct call to glance-api using requests
This change makes this check no longer use the glanceclient tool so
we can craft a request that doesn't hit the glance-registry. The
reason for this is that the glance-registry itself is tested in a
different check and therefore we just need to ensure the glance-api
itself is responding.
|
#!/usr/bin/env python
from maas_common import (status_ok, status_err, metric, get_keystone_client,
get_auth_ref)
from requests import Session
from requests import exceptions as exc
def check(auth_ref):
keystone = get_keystone_client(auth_ref)
tenant_id = keystone.tenant_id
auth_token = keystone.auth_token
registry_endpoint = 'http://127.0.0.1:9292/v2'
api-status = 1
milliseconds = 0
s = Session()
s.headers.update(
{'Content-type': 'application/json',
'x-auth-token': auth_token})
try:
# Hit something that isn't querying the glance-registry, since we
# query glance-registry in separate checks
r = s.get('%s/schemas/image' % registry_endpoint, verify=False,
timeout=10)
except (exc.ConnectionError, exc.HTTPError, exc.Timeout):
api_status = 0
milliseconds = -1
except Exception as e:
status_err(str(e))
else:
milliseconds = r.elapsed.total_seconds() * 1000
if not r.ok:
api_status = 0
status_ok()
metric('glance_registry_local_status', 'uint32', api_status)
metric('glance_registry_local_response_time', 'int32', milliseconds)
def main():
auth_ref = get_auth_ref()
check(auth_ref)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from maas_common import (get_auth_ref, get_glance_client, status_err,
status_ok, metric)
import sys
IMAGE_ENDPOINT = 'http://127.0.0.1:9292'
def check(token):
glance = get_glance_client(token, IMAGE_ENDPOINT)
if glance is None:
status_err('Unable to obtain valid glance client, cannot proceed')
status_ok()
metric('glance_api_local_status', 'uint32', 1)
def main():
auth_ref = get_auth_ref()
token = auth_ref['token']['id']
check(token)
if __name__ == "__main__":
main()
<commit_msg>Make a direct call to glance-api using requests
This change makes this check no longer use the glanceclient tool so
we can craft a request that doesn't hit the glance-registry. The
reason for this is that the glance-registry itself is tested in a
different check and therefore we just need to ensure the glance-api
itself is responding.<commit_after>
|
#!/usr/bin/env python
from maas_common import (status_ok, status_err, metric, get_keystone_client,
get_auth_ref)
from requests import Session
from requests import exceptions as exc
def check(auth_ref):
keystone = get_keystone_client(auth_ref)
tenant_id = keystone.tenant_id
auth_token = keystone.auth_token
registry_endpoint = 'http://127.0.0.1:9292/v2'
api-status = 1
milliseconds = 0
s = Session()
s.headers.update(
{'Content-type': 'application/json',
'x-auth-token': auth_token})
try:
# Hit something that isn't querying the glance-registry, since we
# query glance-registry in separate checks
r = s.get('%s/schemas/image' % registry_endpoint, verify=False,
timeout=10)
except (exc.ConnectionError, exc.HTTPError, exc.Timeout):
api_status = 0
milliseconds = -1
except Exception as e:
status_err(str(e))
else:
milliseconds = r.elapsed.total_seconds() * 1000
if not r.ok:
api_status = 0
status_ok()
metric('glance_registry_local_status', 'uint32', api_status)
metric('glance_registry_local_response_time', 'int32', milliseconds)
def main():
auth_ref = get_auth_ref()
check(auth_ref)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from maas_common import (get_auth_ref, get_glance_client, status_err,
status_ok, metric)
import sys
IMAGE_ENDPOINT = 'http://127.0.0.1:9292'
def check(token):
glance = get_glance_client(token, IMAGE_ENDPOINT)
if glance is None:
status_err('Unable to obtain valid glance client, cannot proceed')
status_ok()
metric('glance_api_local_status', 'uint32', 1)
def main():
auth_ref = get_auth_ref()
token = auth_ref['token']['id']
check(token)
if __name__ == "__main__":
main()
Make a direct call to glance-api using requests
This change makes this check no longer use the glanceclient tool so
we can craft a request that doesn't hit the glance-registry. The
reason for this is that the glance-registry itself is tested in a
different check and therefore we just need to ensure the glance-api
itself is responding.#!/usr/bin/env python
from maas_common import (status_ok, status_err, metric, get_keystone_client,
get_auth_ref)
from requests import Session
from requests import exceptions as exc
def check(auth_ref):
keystone = get_keystone_client(auth_ref)
tenant_id = keystone.tenant_id
auth_token = keystone.auth_token
registry_endpoint = 'http://127.0.0.1:9292/v2'
api-status = 1
milliseconds = 0
s = Session()
s.headers.update(
{'Content-type': 'application/json',
'x-auth-token': auth_token})
try:
# Hit something that isn't querying the glance-registry, since we
# query glance-registry in separate checks
r = s.get('%s/schemas/image' % registry_endpoint, verify=False,
timeout=10)
except (exc.ConnectionError, exc.HTTPError, exc.Timeout):
api_status = 0
milliseconds = -1
except Exception as e:
status_err(str(e))
else:
milliseconds = r.elapsed.total_seconds() * 1000
if not r.ok:
api_status = 0
status_ok()
metric('glance_registry_local_status', 'uint32', api_status)
metric('glance_registry_local_response_time', 'int32', milliseconds)
def main():
auth_ref = get_auth_ref()
check(auth_ref)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
from maas_common import (get_auth_ref, get_glance_client, status_err,
status_ok, metric)
import sys
IMAGE_ENDPOINT = 'http://127.0.0.1:9292'
def check(token):
glance = get_glance_client(token, IMAGE_ENDPOINT)
if glance is None:
status_err('Unable to obtain valid glance client, cannot proceed')
status_ok()
metric('glance_api_local_status', 'uint32', 1)
def main():
auth_ref = get_auth_ref()
token = auth_ref['token']['id']
check(token)
if __name__ == "__main__":
main()
<commit_msg>Make a direct call to glance-api using requests
This change makes this check no longer use the glanceclient tool so
we can craft a request that doesn't hit the glance-registry. The
reason for this is that the glance-registry itself is tested in a
different check and therefore we just need to ensure the glance-api
itself is responding.<commit_after>#!/usr/bin/env python
from maas_common import (status_ok, status_err, metric, get_keystone_client,
get_auth_ref)
from requests import Session
from requests import exceptions as exc
def check(auth_ref):
keystone = get_keystone_client(auth_ref)
tenant_id = keystone.tenant_id
auth_token = keystone.auth_token
registry_endpoint = 'http://127.0.0.1:9292/v2'
api-status = 1
milliseconds = 0
s = Session()
s.headers.update(
{'Content-type': 'application/json',
'x-auth-token': auth_token})
try:
# Hit something that isn't querying the glance-registry, since we
# query glance-registry in separate checks
r = s.get('%s/schemas/image' % registry_endpoint, verify=False,
timeout=10)
except (exc.ConnectionError, exc.HTTPError, exc.Timeout):
api_status = 0
milliseconds = -1
except Exception as e:
status_err(str(e))
else:
milliseconds = r.elapsed.total_seconds() * 1000
if not r.ok:
api_status = 0
status_ok()
metric('glance_registry_local_status', 'uint32', api_status)
metric('glance_registry_local_response_time', 'int32', milliseconds)
def main():
auth_ref = get_auth_ref()
check(auth_ref)
if __name__ == "__main__":
main()
|
f6491445d8811f0fcb5bf8937056a4e15ed985b4
|
tests/acceptance/test_ensure_index.py
|
tests/acceptance/test_ensure_index.py
|
from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
assert self.indexes['number_1'] == {
'key': [('number', 1)],
'unique': False,
'v': 0,
}
def should_create_unique_index_on_name(self):
assert self.indexes['name_1'] == {
'key': [('name', 1)],
'unique': True,
'v': 0,
}
def should_create_descending_ascending_index(self):
assert self.indexes['descending_-1_ascending_1'] == {
'key': [('descending', -1), ('ascending', 1)],
'unique': False,
'v': 0,
}
|
from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
index = self.indexes['number_1']
assert index['key'] == [('number', 1)]
assert index.get('unique', False) is False
def should_create_unique_index_on_name(self):
index = self.indexes['name_1']
assert index['key'] == [('name', 1)]
assert index['unique'] is True
def should_create_descending_ascending_index(self):
index = self.indexes['descending_-1_ascending_1']
assert index['key'] == [('descending', -1), ('ascending', 1)]
assert index.get('unique', False) is False
|
Fix acceptance test for MongoDB 2.0
|
indexes: Fix acceptance test for MongoDB 2.0
There were some minor changes for the test to work properly
with 2.0:
- The index versions are now 1 instead of 0
Don't bother checking the version of index since scalymongo has no
control over it.
- The `unique` key is now only provided when ``True``
|
Python
|
bsd-3-clause
|
allancaffee/scaly-mongo
|
from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
assert self.indexes['number_1'] == {
'key': [('number', 1)],
'unique': False,
'v': 0,
}
def should_create_unique_index_on_name(self):
assert self.indexes['name_1'] == {
'key': [('name', 1)],
'unique': True,
'v': 0,
}
def should_create_descending_ascending_index(self):
assert self.indexes['descending_-1_ascending_1'] == {
'key': [('descending', -1), ('ascending', 1)],
'unique': False,
'v': 0,
}
indexes: Fix acceptance test for MongoDB 2.0
There were some minor changes for the test to work properly
with 2.0:
- The index versions are now 1 instead of 0
Don't bother checking the version of index since scalymongo has no
control over it.
- The `unique` key is now only provided when ``True``
|
from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
index = self.indexes['number_1']
assert index['key'] == [('number', 1)]
assert index.get('unique', False) is False
def should_create_unique_index_on_name(self):
index = self.indexes['name_1']
assert index['key'] == [('name', 1)]
assert index['unique'] is True
def should_create_descending_ascending_index(self):
index = self.indexes['descending_-1_ascending_1']
assert index['key'] == [('descending', -1), ('ascending', 1)]
assert index.get('unique', False) is False
|
<commit_before>from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
assert self.indexes['number_1'] == {
'key': [('number', 1)],
'unique': False,
'v': 0,
}
def should_create_unique_index_on_name(self):
assert self.indexes['name_1'] == {
'key': [('name', 1)],
'unique': True,
'v': 0,
}
def should_create_descending_ascending_index(self):
assert self.indexes['descending_-1_ascending_1'] == {
'key': [('descending', -1), ('ascending', 1)],
'unique': False,
'v': 0,
}
<commit_msg>indexes: Fix acceptance test for MongoDB 2.0
There were some minor changes for the test to work properly
with 2.0:
- The index versions are now 1 instead of 0
Don't bother checking the version of index since scalymongo has no
control over it.
- The `unique` key is now only provided when ``True``<commit_after>
|
from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
index = self.indexes['number_1']
assert index['key'] == [('number', 1)]
assert index.get('unique', False) is False
def should_create_unique_index_on_name(self):
index = self.indexes['name_1']
assert index['key'] == [('name', 1)]
assert index['unique'] is True
def should_create_descending_ascending_index(self):
index = self.indexes['descending_-1_ascending_1']
assert index['key'] == [('descending', -1), ('ascending', 1)]
assert index.get('unique', False) is False
|
from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
assert self.indexes['number_1'] == {
'key': [('number', 1)],
'unique': False,
'v': 0,
}
def should_create_unique_index_on_name(self):
assert self.indexes['name_1'] == {
'key': [('name', 1)],
'unique': True,
'v': 0,
}
def should_create_descending_ascending_index(self):
assert self.indexes['descending_-1_ascending_1'] == {
'key': [('descending', -1), ('ascending', 1)],
'unique': False,
'v': 0,
}
indexes: Fix acceptance test for MongoDB 2.0
There were some minor changes for the test to work properly
with 2.0:
- The index versions are now 1 instead of 0
Don't bother checking the version of index since scalymongo has no
control over it.
- The `unique` key is now only provided when ``True``from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
index = self.indexes['number_1']
assert index['key'] == [('number', 1)]
assert index.get('unique', False) is False
def should_create_unique_index_on_name(self):
index = self.indexes['name_1']
assert index['key'] == [('name', 1)]
assert index['unique'] is True
def should_create_descending_ascending_index(self):
index = self.indexes['descending_-1_ascending_1']
assert index['key'] == [('descending', -1), ('ascending', 1)]
assert index.get('unique', False) is False
|
<commit_before>from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
assert self.indexes['number_1'] == {
'key': [('number', 1)],
'unique': False,
'v': 0,
}
def should_create_unique_index_on_name(self):
assert self.indexes['name_1'] == {
'key': [('name', 1)],
'unique': True,
'v': 0,
}
def should_create_descending_ascending_index(self):
assert self.indexes['descending_-1_ascending_1'] == {
'key': [('descending', -1), ('ascending', 1)],
'unique': False,
'v': 0,
}
<commit_msg>indexes: Fix acceptance test for MongoDB 2.0
There were some minor changes for the test to work properly
with 2.0:
- The index versions are now 1 instead of 0
Don't bother checking the version of index since scalymongo has no
control over it.
- The `unique` key is now only provided when ``True``<commit_after>from tests.acceptance.base_acceptance_test import BaseAcceptanceTest
from scalymongo import Document
class IndexTestDocument(Document):
structure = {
'number': int,
'name': unicode,
'descending': int,
'ascending': int,
}
indexes = [
{'fields': 'number'},
{'fields': 'name', 'unique': True},
{'fields': [('descending', -1), ('ascending', 1)]}
]
__database__ = 'test'
__collection__ = 'IndexTestDocument'
class TestEnsureIndex(BaseAcceptanceTest):
@classmethod
def setup_class(cls):
BaseAcceptanceTest.setup_class()
cls.connected_document = cls.connection.models.IndexTestDocument
cls.connected_document.collection.drop()
cls.connected_document.ensure_indexes()
cls.indexes = cls.connected_document.collection.index_information()
def should_create_index_on_number(self):
index = self.indexes['number_1']
assert index['key'] == [('number', 1)]
assert index.get('unique', False) is False
def should_create_unique_index_on_name(self):
index = self.indexes['name_1']
assert index['key'] == [('name', 1)]
assert index['unique'] is True
def should_create_descending_ascending_index(self):
index = self.indexes['descending_-1_ascending_1']
assert index['key'] == [('descending', -1), ('ascending', 1)]
assert index.get('unique', False) is False
|
05b8ae37fccb152fcdd618b09984f3d1d8beae45
|
fabfile.py
|
fabfile.py
|
import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
|
import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
migrate_databases()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
|
Migrate database for small update
|
Migrate database for small update
|
Python
|
mit
|
whtsky/Gather,whtsky/Gather
|
import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
Migrate database for small update
|
import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
migrate_databases()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
|
<commit_before>import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
<commit_msg>Migrate database for small update<commit_after>
|
import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
migrate_databases()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
|
import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
Migrate database for small updateimport os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
migrate_databases()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
|
<commit_before>import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
<commit_msg>Migrate database for small update<commit_after>import os
from fabric.api import *
base_path = os.path.dirname(__file__)
project_root = "~/Gather"
pip_path = os.path.join(project_root, "bin/pip")
python_path = os.path.join(project_root, "bin/python")
env.user = "gather"
env.hosts = ["gather.whouz.com"]
def update_from_github():
with cd(project_root):
run("git pull")
def update_pip_requirements():
with cd(project_root):
run("%s install -r requirements.txt" % pip_path)
def migrate_databases():
with cd(project_root):
run("%s manage.py db upgrade" % python_path)
def reload_nginx():
_current_user = env.user
env.user = 'root'
run("/etc/init.d/nginx reload")
env.user = _current_user
def restart_gunicorn():
_current_user = env.user
env.user = 'root'
run("supervisorctl reload")
env.user = _current_user
def reload_gunicorn():
run("kill -HUP `cat /tmp/gather.pid`")
def update():
update_from_github()
migrate_databases()
reload_gunicorn()
def fullyupdate():
update_from_github()
update_pip_requirements()
migrate_databases()
reload_nginx()
reload_gunicorn()
|
9db8a4c37cb226f1606b711493ebec16573f3d46
|
polyaxon/libs/spec_validation.py
|
polyaxon/libs/spec_validation.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
return spec
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification, Specification
def validate_run_type(spec):
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
validate_run_type(spec)
return spec
def validate_tensorboard_spec_content(content):
try:
spec = Specification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid tensorboard specification content.')
validate_run_type(spec)
return spec
|
Add tensorboard serializer * Add validation * Add tests
|
Add tensorboard serializer
* Add validation
* Add tests
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
return spec
Add tensorboard serializer
* Add validation
* Add tests
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification, Specification
def validate_run_type(spec):
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
validate_run_type(spec)
return spec
def validate_tensorboard_spec_content(content):
try:
spec = Specification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid tensorboard specification content.')
validate_run_type(spec)
return spec
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
return spec
<commit_msg>Add tensorboard serializer
* Add validation
* Add tests<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification, Specification
def validate_run_type(spec):
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
validate_run_type(spec)
return spec
def validate_tensorboard_spec_content(content):
try:
spec = Specification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid tensorboard specification content.')
validate_run_type(spec)
return spec
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
return spec
Add tensorboard serializer
* Add validation
* Add tests# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification, Specification
def validate_run_type(spec):
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
validate_run_type(spec)
return spec
def validate_tensorboard_spec_content(content):
try:
spec = Specification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid tensorboard specification content.')
validate_run_type(spec)
return spec
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
return spec
<commit_msg>Add tensorboard serializer
* Add validation
* Add tests<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.core.exceptions import ValidationError
from polyaxon_schemas.exceptions import PolyaxonfileError, PolyaxonConfigurationError
from polyaxon_schemas.polyaxonfile.specification import GroupSpecification, Specification
def validate_run_type(spec):
if spec.is_local:
raise ValidationError('Received specification content for a local environment run.')
def validate_spec_content(content):
try:
spec = GroupSpecification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid specification content.')
validate_run_type(spec)
return spec
def validate_tensorboard_spec_content(content):
try:
spec = Specification.read(content)
except (PolyaxonfileError, PolyaxonConfigurationError):
raise ValidationError('Received non valid tensorboard specification content.')
validate_run_type(spec)
return spec
|
d1bd937f7db211cc536e594952899c44a21f7e5d
|
tunobase/blog/urls.py
|
tunobase/blog/urls.py
|
"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=1,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
|
"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=10,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
|
Increase the pagination amount for blog app
|
Increase the pagination amount for blog app
|
Python
|
bsd-3-clause
|
unomena/tunobase,unomena/tunobase
|
"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=1,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
Increase the pagination amount for blog app
|
"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=10,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
|
<commit_before>"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=1,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
<commit_msg>Increase the pagination amount for blog app<commit_after>
|
"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=10,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
|
"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=1,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
Increase the pagination amount for blog app"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=10,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
|
<commit_before>"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=1,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
<commit_msg>Increase the pagination amount for blog app<commit_after>"""
Blog App
This module provides generic django URL routing.
"""
from django.conf.urls import patterns, url
from tunobase.blog import views
urlpatterns = patterns('',
url(r'^list/$',
views.BlogList.as_view(
template_name='blog/blog_list.html'
),
name='blog_list'
),
url(r'^detail/(?P<slug>[\w-]+)/$',
views.BlogEntryDetail.as_view(
template_name='blog/blog_entry_detail.html'
),
name='blog_entry_detail'
),
url(r'^(?P<slug>[\w-]+)/$',
views.BlogDetail.as_view(
paginate_by=10,
template_name='blog/blog_detail.html',
partial_template_name='blog/includes/blog_entries.html'
),
name='blog_detail'
),
(r'^feed/$', views.BlogFeed()),
)
|
d96b6fa97272057f0fb67f2440f1b5b642b92bbe
|
src/python/tensorflow_cloud/core/tests/examples/multi_file_example/scale_model.py
|
src/python/tensorflow_cloud/core/tests/examples/multi_file_example/scale_model.py
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
tfc.run(
entry_point="train_model.py", requirements_txt="requirements.txt", stream_logs=True
)
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
gcp_bucket = "your-gcp-bucket"
tfc.run(
entry_point="train_model.py",
requirements_txt="requirements.txt",
docker_image_bucket_name=gcp_bucket,
stream_logs=True,
)
|
Add storage bucket to run() call
|
Add storage bucket to run() call
|
Python
|
apache-2.0
|
tensorflow/cloud,tensorflow/cloud
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
tfc.run(
entry_point="train_model.py", requirements_txt="requirements.txt", stream_logs=True
)
Add storage bucket to run() call
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
gcp_bucket = "your-gcp-bucket"
tfc.run(
entry_point="train_model.py",
requirements_txt="requirements.txt",
docker_image_bucket_name=gcp_bucket,
stream_logs=True,
)
|
<commit_before># Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
tfc.run(
entry_point="train_model.py", requirements_txt="requirements.txt", stream_logs=True
)
<commit_msg>Add storage bucket to run() call<commit_after>
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
gcp_bucket = "your-gcp-bucket"
tfc.run(
entry_point="train_model.py",
requirements_txt="requirements.txt",
docker_image_bucket_name=gcp_bucket,
stream_logs=True,
)
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
tfc.run(
entry_point="train_model.py", requirements_txt="requirements.txt", stream_logs=True
)
Add storage bucket to run() call# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
gcp_bucket = "your-gcp-bucket"
tfc.run(
entry_point="train_model.py",
requirements_txt="requirements.txt",
docker_image_bucket_name=gcp_bucket,
stream_logs=True,
)
|
<commit_before># Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
tfc.run(
entry_point="train_model.py", requirements_txt="requirements.txt", stream_logs=True
)
<commit_msg>Add storage bucket to run() call<commit_after># Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow_cloud as tfc
gcp_bucket = "your-gcp-bucket"
tfc.run(
entry_point="train_model.py",
requirements_txt="requirements.txt",
docker_image_bucket_name=gcp_bucket,
stream_logs=True,
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.