commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8a3249a0c5fa3a82e7730aab973822b34bdc1a4a
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=[],
)
|
import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['bottle'],
)
|
Add bottle as a requirement for this package.
|
Add bottle as a requirement for this package.
|
Python
|
lgpl-2.1
|
keredson/boddle
|
import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=[],
)
Add bottle as a requirement for this package.
|
import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['bottle'],
)
|
<commit_before>import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=[],
)
<commit_msg>Add bottle as a requirement for this package.<commit_after>
|
import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['bottle'],
)
|
import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=[],
)
Add bottle as a requirement for this package.import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['bottle'],
)
|
<commit_before>import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=[],
)
<commit_msg>Add bottle as a requirement for this package.<commit_after>import os
from setuptools import setup
def long_description():
os.system('pandoc --from=markdown --to=rst --output=README.rst README.md')
readme_fn = os.path.join(os.path.dirname(__file__), 'README.rst')
if os.path.exists(readme_fn):
with open(readme_fn) as f:
return f.read()
else:
return 'not available'
setup(
name='boddle',
version=__import__('boddle').__version__,
description="A unit testing tool for Python's bottle library.",
long_description=long_description(),
author='Derek Anderson',
author_email='public@kered.org',
url='https://github.com/keredson/boddle',
packages=[],
py_modules=['boddle'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
install_requires=['bottle'],
)
|
27482ee5b6728160ff167ccb70b3dcdf86b99a55
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
|
from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0beta'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
|
Revert "Update mbutil to 0.2.0"
|
Revert "Update mbutil to 0.2.0"
This reverts commit c3834af13fb14d0961e7e8ce29c3bbbe91ebb5ce.
|
Python
|
mit
|
lukasmartinelli/mbtoolbox
|
from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
Revert "Update mbutil to 0.2.0"
This reverts commit c3834af13fb14d0961e7e8ce29c3bbbe91ebb5ce.
|
from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0beta'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
|
<commit_before>from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
<commit_msg>Revert "Update mbutil to 0.2.0"
This reverts commit c3834af13fb14d0961e7e8ce29c3bbbe91ebb5ce.<commit_after>
|
from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0beta'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
|
from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
Revert "Update mbutil to 0.2.0"
This reverts commit c3834af13fb14d0961e7e8ce29c3bbbe91ebb5ce.from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0beta'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
|
<commit_before>from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
<commit_msg>Revert "Update mbutil to 0.2.0"
This reverts commit c3834af13fb14d0961e7e8ce29c3bbbe91ebb5ce.<commit_after>from setuptools import setup, find_packages
import sys
import mbtoolbox
with open('mbtoolbox/__init__.py') as f:
for line in f:
if line.find("__version__") >= 0:
version = line.split("=")[1].strip()
version = version.strip('"')
version = version.strip("'")
continue
open_kwds = {}
if sys.version_info > (3,):
open_kwds['encoding'] = 'utf-8'
with open('README.md', **open_kwds) as f:
readme = f.read()
setup(
name='mbtoolbox',
version=mbtoolbox.__version__,
description="MBTiles toolbox tool for optimizing and verifying MBTiles files",
long_description=readme,
classifiers=[],
keywords='',
author='Lukas Martinelli',
author_email='me@lukasmartinelli.ch',
url='https://github.com/lukasmartinelli/mbtoolbox',
license='BSD',
packages=find_packages(exclude=[]),
include_package_data=True,
install_requires=['docopt==0.6.2', 'mercantile==0.8.3',
'humanize==0.5.1', 'mbutil==0.2.0beta'],
dependency_links=['https://github.com/mapbox/mbutil/tarball/master#egg=mbutil-0.2.0beta'],
scripts = ['bin/mbverify', 'bin/mboptimize']
)
|
d1a79a8008f0944b95f4c8d44c737639abaf6559
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/nexmo/python-nexmo',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
|
from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/Nexmo/nexmo-python',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
|
Update package url to github.com/Nexmo/nexmo-python
|
Update package url to github.com/Nexmo/nexmo-python
|
Python
|
mit
|
Nexmo/nexmo-python
|
from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/nexmo/python-nexmo',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
Update package url to github.com/Nexmo/nexmo-python
|
from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/Nexmo/nexmo-python',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
|
<commit_before>from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/nexmo/python-nexmo',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
<commit_msg>Update package url to github.com/Nexmo/nexmo-python<commit_after>
|
from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/Nexmo/nexmo-python',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
|
from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/nexmo/python-nexmo',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
Update package url to github.com/Nexmo/nexmo-pythonfrom setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/Nexmo/nexmo-python',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
|
<commit_before>from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/nexmo/python-nexmo',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
<commit_msg>Update package url to github.com/Nexmo/nexmo-python<commit_after>from setuptools import setup
setup(name='nexmo',
version='1.0.3',
description='Python client for the Nexmo API',
long_description='Python client for the Nexmo API',
url='http://github.com/Nexmo/nexmo-python',
author='Tim Craft',
author_email='mail@timcraft.com',
license='MIT',
packages=['nexmo'],
platforms=['any'],
install_requires=['requests'])
|
45bb129760da600879d1d75baeb17100a8824426
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'components/Parameters/*']}
)
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'data/*']}
)
|
Include new data directory as package data
|
Include new data directory as package data
|
Python
|
mit
|
permamodel/permamodel,permamodel/permamodel
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'components/Parameters/*']}
)
Include new data directory as package data
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'data/*']}
)
|
<commit_before>#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'components/Parameters/*']}
)
<commit_msg>Include new data directory as package data<commit_after>
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'data/*']}
)
|
#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'components/Parameters/*']}
)
Include new data directory as package data#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'data/*']}
)
|
<commit_before>#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'components/Parameters/*']}
)
<commit_msg>Include new data directory as package data<commit_after>#! /usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='permamodel',
version='0.1.0',
author='Elchin Jafarov and Scott Stewart',
author_email='james.stewart@colorado.edu',
description='Permamodel',
long_description=open('README.md').read(),
packages=find_packages(),
#install_requires=('numpy', 'nose', 'gdal', 'pyproj'),
install_requires=('affine', 'netCDF4', 'scipy', 'numpy', 'nose',),
package_data={'': ['examples/*.cfg',
'examples/*.dat',
'data/*']}
)
|
26e3ff35c56f96adf986e0d71de05248e94a01ed
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['py3-protobuffers'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['protobuf>=2.6.0'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
Move from py3-protobufers to official version
|
Move from py3-protobufers to official version
The offical version of protobuf supports python 3 as of version 2.6, whats more
it supports version 3.5 which the py3-protobuffers version doesn't seem to (see
Issue #4). The py3-protobuffers module is also no longer maintained so switching
away is sensible.
|
Python
|
mit
|
trailofbits/protofuzz
|
from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['py3-protobuffers'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
Move from py3-protobufers to official version
The offical version of protobuf supports python 3 as of version 2.6, whats more
it supports version 3.5 which the py3-protobuffers version doesn't seem to (see
Issue #4). The py3-protobuffers module is also no longer maintained so switching
away is sensible.
|
from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['protobuf>=2.6.0'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['py3-protobuffers'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
<commit_msg>Move from py3-protobufers to official version
The offical version of protobuf supports python 3 as of version 2.6, whats more
it supports version 3.5 which the py3-protobuffers version doesn't seem to (see
Issue #4). The py3-protobuffers module is also no longer maintained so switching
away is sensible.<commit_after>
|
from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['protobuf>=2.6.0'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['py3-protobuffers'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
Move from py3-protobufers to official version
The offical version of protobuf supports python 3 as of version 2.6, whats more
it supports version 3.5 which the py3-protobuffers version doesn't seem to (see
Issue #4). The py3-protobuffers module is also no longer maintained so switching
away is sensible.from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['protobuf>=2.6.0'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['py3-protobuffers'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
<commit_msg>Move from py3-protobufers to official version
The offical version of protobuf supports python 3 as of version 2.6, whats more
it supports version 3.5 which the py3-protobuffers version doesn't seem to (see
Issue #4). The py3-protobuffers module is also no longer maintained so switching
away is sensible.<commit_after>from setuptools import setup
setup(name='protofuzz',
version='0.1',
description='Google protobuf message generator',
url='http://github.com/trailofbits/protofuzz',
author='Yan Ivnitskiy',
author_email='yan@trailofbits.com',
license='MIT',
packages=['protofuzz'],
install_requires=['protobuf>=2.6.0'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
9437c82748b4154ccbb336f9fd115a2f70e22c20
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='bambou',
version='0.0.1',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
|
from setuptools import setup
setup(
name='bambou',
version='0.0.2',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
|
Set Bambou package version to 0.0.2
|
Set Bambou package version to 0.0.2
|
Python
|
bsd-3-clause
|
nuagenetworks/bambou
|
from setuptools import setup
setup(
name='bambou',
version='0.0.1',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
Set Bambou package version to 0.0.2
|
from setuptools import setup
setup(
name='bambou',
version='0.0.2',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
|
<commit_before>from setuptools import setup
setup(
name='bambou',
version='0.0.1',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
<commit_msg>Set Bambou package version to 0.0.2<commit_after>
|
from setuptools import setup
setup(
name='bambou',
version='0.0.2',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
|
from setuptools import setup
setup(
name='bambou',
version='0.0.1',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
Set Bambou package version to 0.0.2from setuptools import setup
setup(
name='bambou',
version='0.0.2',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
|
<commit_before>from setuptools import setup
setup(
name='bambou',
version='0.0.1',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
<commit_msg>Set Bambou package version to 0.0.2<commit_after>from setuptools import setup
setup(
name='bambou',
version='0.0.2',
url='http://www.nuagenetworks.net/',
author='Christophe Serafin',
author_email='christophe.serafin@alcatel-lucent.com',
packages=['bambou', 'bambou.utils'],
description='REST Library for Nuage Networks',
long_description=open('README.md').read(),
install_requires=[line for line in open('requirements.txt')],
)
|
2c71d9589947e1e1f3e75e907b1c28beccc7b268
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=open('README.rst').read(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
from setuptools import setup, find_packages
try:
import pypandoc
def long_description():
return pypandoc.convert_file('README.md', 'rst')
except ImportError:
def long_description():
return ''
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=long_description(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
Use pypandoc to convert README.md to RST for long_description
|
Use pypandoc to convert README.md to RST for long_description
|
Python
|
mit
|
mikemill/rq_retry_scheduler
|
from setuptools import setup, find_packages
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=open('README.rst').read(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
Use pypandoc to convert README.md to RST for long_description
|
from setuptools import setup, find_packages
try:
import pypandoc
def long_description():
return pypandoc.convert_file('README.md', 'rst')
except ImportError:
def long_description():
return ''
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=long_description(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=open('README.rst').read(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
<commit_msg>Use pypandoc to convert README.md to RST for long_description<commit_after>
|
from setuptools import setup, find_packages
try:
import pypandoc
def long_description():
return pypandoc.convert_file('README.md', 'rst')
except ImportError:
def long_description():
return ''
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=long_description(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
from setuptools import setup, find_packages
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=open('README.rst').read(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
Use pypandoc to convert README.md to RST for long_descriptionfrom setuptools import setup, find_packages
try:
import pypandoc
def long_description():
return pypandoc.convert_file('README.md', 'rst')
except ImportError:
def long_description():
return ''
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=long_description(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=open('README.rst').read(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
<commit_msg>Use pypandoc to convert README.md to RST for long_description<commit_after>from setuptools import setup, find_packages
try:
import pypandoc
def long_description():
return pypandoc.convert_file('README.md', 'rst')
except ImportError:
def long_description():
return ''
setup(
name='rq-retry-scheduler',
version='0.1.0b1',
url='https://github.com/mikemill/rq_retry_scheduler',
description='RQ Retry and Scheduler',
long_description=long_description(),
author='Michael Miller',
author_email='mikemill@gmail.com',
packages=find_packages(exclude=['*tests*']),
license='MIT',
install_requires=['rq>=0.6.0'],
zip_safe=False,
platforms='any',
entry_points={
'console_scripts': [
'rqscheduler = rq_retry_scheduler.cli:main',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
]
)
|
dadc4766e047d83c65915cb2d1f2fb43ec2ed12e
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='rezun-plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Steve Schmidt',
author_email='azcane@gmail.com',
license='BSD',
url='https://github.com/rezun/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
|
from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Eric Work',
author_email='work.eric@gmail.com',
license='BSD',
url='https://github.com/zeroepoch/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
|
Rename pypi package and change author
|
Rename pypi package and change author
|
Python
|
bsd-2-clause
|
zeroepoch/plotbitrate
|
from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='rezun-plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Steve Schmidt',
author_email='azcane@gmail.com',
license='BSD',
url='https://github.com/rezun/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
Rename pypi package and change author
|
from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Eric Work',
author_email='work.eric@gmail.com',
license='BSD',
url='https://github.com/zeroepoch/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
|
<commit_before>from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='rezun-plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Steve Schmidt',
author_email='azcane@gmail.com',
license='BSD',
url='https://github.com/rezun/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
<commit_msg>Rename pypi package and change author<commit_after>
|
from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Eric Work',
author_email='work.eric@gmail.com',
license='BSD',
url='https://github.com/zeroepoch/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
|
from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='rezun-plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Steve Schmidt',
author_email='azcane@gmail.com',
license='BSD',
url='https://github.com/rezun/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
Rename pypi package and change authorfrom setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Eric Work',
author_email='work.eric@gmail.com',
license='BSD',
url='https://github.com/zeroepoch/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
|
<commit_before>from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='rezun-plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Steve Schmidt',
author_email='azcane@gmail.com',
license='BSD',
url='https://github.com/rezun/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
<commit_msg>Rename pypi package and change author<commit_after>from setuptools import setup, find_packages
from plotbitrate import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='plotbitrate',
version=__version__,
packages=find_packages(),
description='A simple bitrate plotter for media files',
long_description=long_description,
long_description_content_type="text/markdown",
author='Eric Work',
author_email='work.eric@gmail.com',
license='BSD',
url='https://github.com/zeroepoch/plotbitrate',
py_modules=['plotbitrate'],
classifiers=[
'Topic :: Multimedia :: Sound/Audio',
'Natural Language :: English',
'Programming Language :: Python :: 3.5',
],
keywords='ffprobe bitrate plot',
python_requires='>=3.5',
entry_points={
'console_scripts': [
'plotbitrate = plotbitrate:main'
]
},
install_requires=[
'matplotlib',
'pyqt5'
]
)
|
6df9998f67056ca751d624379565d8e793e93f1f
|
setup.py
|
setup.py
|
import re
from setuptools import setup
init_contents = open('random_object_id/__init__.py').read()
version = re.search('"([0-9\.]+)"', init_contents).group(1)
with open('README.rst', 'rb') as f:
long_description = f.read().decode('utf-8')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
|
import re
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
init_path = 'random_object_id/__init__.py'
version = re.search('"([0-9\.]+)"', read(init_path)).group(1)
long_description = read('README.rst')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
|
Use helper method to read files
|
Use helper method to read files
|
Python
|
mit
|
mxr/random-object-id
|
import re
from setuptools import setup
init_contents = open('random_object_id/__init__.py').read()
version = re.search('"([0-9\.]+)"', init_contents).group(1)
with open('README.rst', 'rb') as f:
long_description = f.read().decode('utf-8')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
Use helper method to read files
|
import re
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
init_path = 'random_object_id/__init__.py'
version = re.search('"([0-9\.]+)"', read(init_path)).group(1)
long_description = read('README.rst')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
|
<commit_before>import re
from setuptools import setup
init_contents = open('random_object_id/__init__.py').read()
version = re.search('"([0-9\.]+)"', init_contents).group(1)
with open('README.rst', 'rb') as f:
long_description = f.read().decode('utf-8')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
<commit_msg>Use helper method to read files<commit_after>
|
import re
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
init_path = 'random_object_id/__init__.py'
version = re.search('"([0-9\.]+)"', read(init_path)).group(1)
long_description = read('README.rst')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
|
import re
from setuptools import setup
init_contents = open('random_object_id/__init__.py').read()
version = re.search('"([0-9\.]+)"', init_contents).group(1)
with open('README.rst', 'rb') as f:
long_description = f.read().decode('utf-8')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
Use helper method to read filesimport re
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
init_path = 'random_object_id/__init__.py'
version = re.search('"([0-9\.]+)"', read(init_path)).group(1)
long_description = read('README.rst')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
|
<commit_before>import re
from setuptools import setup
init_contents = open('random_object_id/__init__.py').read()
version = re.search('"([0-9\.]+)"', init_contents).group(1)
with open('README.rst', 'rb') as f:
long_description = f.read().decode('utf-8')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
<commit_msg>Use helper method to read files<commit_after>import re
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
init_path = 'random_object_id/__init__.py'
version = re.search('"([0-9\.]+)"', read(init_path)).group(1)
long_description = read('README.rst')
setup(
name='random-object-id',
packages=['random_object_id'],
entry_points={
'console_scripts': [
'random_object_id=random_object_id.random_object_id:main',
],
},
version=version,
description='Generate a random MongoDB ObjectId.',
long_description=long_description,
author='Max Rozentsveyg',
author_email='maxr@outlook.com',
url='https://github.com/mxr/random-object-id',
)
|
1a647b75916fc11d835813825dc20ce45a368546
|
setup.py
|
setup.py
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5'
)
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5',
install_requires=[
"cached_property==1.3.0"
]
)
|
Add cached_property as a requirement.
|
Add cached_property as a requirement.
|
Python
|
mit
|
SunDwarf/asyncqlio
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5'
)
Add cached_property as a requirement.
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5',
install_requires=[
"cached_property==1.3.0"
]
)
|
<commit_before>from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5'
)
<commit_msg>Add cached_property as a requirement.<commit_after>
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5',
install_requires=[
"cached_property==1.3.0"
]
)
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5'
)
Add cached_property as a requirement.from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5',
install_requires=[
"cached_property==1.3.0"
]
)
|
<commit_before>from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5'
)
<commit_msg>Add cached_property as a requirement.<commit_after>from distutils.core import setup
from setuptools import find_packages
setup(
name='katagawa',
version='0.1.0',
packages=find_packages(),
url='https://github.com/SunDwarf/Katagawa',
license='MIT',
author='Laura Dickinson',
author_email='l@veriny.tf',
description='An asyncio ORM for Python 3.5',
install_requires=[
"cached_property==1.3.0"
]
)
|
1ee1a337cb3094ae5a5cc79b6d4c62c2f7f64dc3
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
|
Add trove classifier for Python 3
|
Add trove classifier for Python 3
|
Python
|
mit
|
jhamrick/dbtools,jhamrick/dbtools
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
Add trove classifier for Python 3
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
<commit_msg>Add trove classifier for Python 3<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
Add trove classifier for Python 3#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
<commit_msg>Add trove classifier for Python 3<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(
name='dbtools',
version=open("VERSION.txt").read().strip(),
description='Lightweight SQLite interface',
author='Jessica B. Hamrick',
author_email='jhamrick@berkeley.edu',
url='https://github.com/jhamrick/dbtools',
packages=['dbtools'],
keywords='sqlite pandas dataframe',
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: SQL",
"Topic :: Database :: Front-Ends",
"Topic :: Utilities",
],
install_requires=[
'pandas',
'numpy'
]
)
|
13bd34b4f7933a243afac3917c75bed32a86cd24
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
Update maintainer to Blanc Ltd
|
Update maintainer to Blanc Ltd
|
Python
|
bsd-3-clause
|
blancltd/django-quick-photos,kmlebedev/mezzanine-instagram-quickphotos
|
#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
Update maintainer to Blanc Ltd
|
#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
<commit_before>#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
<commit_msg>Update maintainer to Blanc Ltd<commit_after>
|
#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
Update maintainer to Blanc Ltd#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
<commit_before>#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Alex Tomkins',
maintainer_email='alex@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
<commit_msg>Update maintainer to Blanc Ltd<commit_after>#!/usr/bin/env python
from setuptools import find_packages, setup
# Use quickphotos.VERSION for version numbers
version_tuple = __import__('quickphotos').VERSION
version = '.'.join([str(v) for v in version_tuple])
setup(
name='django-quick-photos',
version=version,
description='Latest Photos from Instagram for Django',
long_description=open('README.rst').read(),
url='https://github.com/blancltd/django-quick-photos',
maintainer='Blanc Ltd',
maintainer_email='studio@blanc.ltd.uk',
platforms=['any'],
install_requires=[
'python-instagram>=0.8.0',
],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license='BSD-2',
)
|
89b55999c3ffa46b627b03cf8f10aeeb42ab986f
|
setup.py
|
setup.py
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = file(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
|
Use open instead of file.
|
Use open instead of file.
|
Python
|
bsd-2-clause
|
ionelmc/django-prefetch
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = file(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
Use open instead of file.
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
|
<commit_before># -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = file(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
<commit_msg>Use open instead of file.<commit_after>
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
|
# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = file(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
Use open instead of file.# -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
|
<commit_before># -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = file(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
<commit_msg>Use open instead of file.<commit_after># -*- encoding: utf8 -*-
from setuptools import setup, find_packages
import os
setup(
name = "django-prefetch",
version = "0.1.1",
url = 'https://github.com/ionelmc/django-prefetch',
download_url = '',
license = 'BSD',
description = "Generic model related data prefetch framework for Django",
long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author = 'Ionel Cristian Mărieș',
author_email = 'contact@ionelmc.ro',
packages = find_packages('src'),
package_dir = {'':'src'},
py_modules = ['prefetch'],
include_package_data = True,
zip_safe = False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
]
)
|
55ef7aa61e7c0980c53f9ae25dbf8ee368b6db1c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
from setuptools import setup
setup(
name='whichpkg',
version='0.3.0',
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
import re
from setuptools import setup
version = re.search("__version__\s*=\s*'(.+)?'", open('bin/whichpkg').read()).groups(1)[0]
setup(
name='whichpkg',
version=version,
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
Read the __version__ from whichpkg directly
|
Read the __version__ from whichpkg directly
|
Python
|
bsd-2-clause
|
mattrobenolt/whichpkg,pombredanne/whichpkg
|
#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
from setuptools import setup
setup(
name='whichpkg',
version='0.3.0',
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
Read the __version__ from whichpkg directly
|
#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
import re
from setuptools import setup
version = re.search("__version__\s*=\s*'(.+)?'", open('bin/whichpkg').read()).groups(1)[0]
setup(
name='whichpkg',
version=version,
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
<commit_before>#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
from setuptools import setup
setup(
name='whichpkg',
version='0.3.0',
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
<commit_msg>Read the __version__ from whichpkg directly<commit_after>
|
#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
import re
from setuptools import setup
version = re.search("__version__\s*=\s*'(.+)?'", open('bin/whichpkg').read()).groups(1)[0]
setup(
name='whichpkg',
version=version,
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
from setuptools import setup
setup(
name='whichpkg',
version='0.3.0',
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
Read the __version__ from whichpkg directly#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
import re
from setuptools import setup
version = re.search("__version__\s*=\s*'(.+)?'", open('bin/whichpkg').read()).groups(1)[0]
setup(
name='whichpkg',
version=version,
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
<commit_before>#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
from setuptools import setup
setup(
name='whichpkg',
version='0.3.0',
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
<commit_msg>Read the __version__ from whichpkg directly<commit_after>#!/usr/bin/env python
"""
whichpkg
========
Locate the path of a specific python module
"""
import re
from setuptools import setup
version = re.search("__version__\s*=\s*'(.+)?'", open('bin/whichpkg').read()).groups(1)[0]
setup(
name='whichpkg',
version=version,
author='Matt Robenolt',
author_email='matt@ydekproductions.com',
url='https://github.com/mattrobenolt/whichpkg',
description='Locate the path of a specific python module',
long_description=__doc__,
install_requires=[],
scripts=['bin/whichpkg'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
f981802947fd2c15be04489f6805395971807c9d
|
PVGeo/__main__.py
|
PVGeo/__main__.py
|
__all__ = [
'test',
]
def test():
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
return unittest.TextTestRunner(verbosity=2).run(testSuite)
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test()
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
__all__ = [
'test',
]
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
run = unittest.TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test(True)
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
Add catch for Travis CI testing.
|
Add catch for Travis CI testing.
|
Python
|
bsd-3-clause
|
banesullivan/ParaViewGeophysics,banesullivan/ParaViewGeophysics,banesullivan/ParaViewGeophysics
|
__all__ = [
'test',
]
def test():
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
return unittest.TextTestRunner(verbosity=2).run(testSuite)
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test()
else:
raise RuntimeError('Unknown argument: %s' % arg)
Add catch for Travis CI testing.
|
__all__ = [
'test',
]
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
run = unittest.TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test(True)
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
<commit_before>__all__ = [
'test',
]
def test():
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
return unittest.TextTestRunner(verbosity=2).run(testSuite)
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test()
else:
raise RuntimeError('Unknown argument: %s' % arg)
<commit_msg>Add catch for Travis CI testing.<commit_after>
|
__all__ = [
'test',
]
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
run = unittest.TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test(True)
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
__all__ = [
'test',
]
def test():
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
return unittest.TextTestRunner(verbosity=2).run(testSuite)
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test()
else:
raise RuntimeError('Unknown argument: %s' % arg)
Add catch for Travis CI testing.__all__ = [
'test',
]
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
run = unittest.TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test(True)
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
<commit_before>__all__ = [
'test',
]
def test():
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
return unittest.TextTestRunner(verbosity=2).run(testSuite)
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test()
else:
raise RuntimeError('Unknown argument: %s' % arg)
<commit_msg>Add catch for Travis CI testing.<commit_after>__all__ = [
'test',
]
def test(close=False):
"""
@desc: This is a convienance method to run all of the tests in `PVGeo`.
@notes:
This can be executed from either the command line of within a standard Python environment:
```bash
$ python -m PVGeo test
```
```py
>>> import PVGeo
>>> PVGeo.test()
```
"""
import unittest
import fnmatch
import os
path = os.path.dirname(__file__) # path to remove
path = path[0:path.rfind('/')]
test_file_strings = []
for root, dirnames, filenames in os.walk(os.path.dirname(__file__)):
for filename in fnmatch.filter(filenames, '__test__.py'):
test_file_strings.append(os.path.join(root, filename).replace(path, ''))
# Remove extensions and change to module import syle
module_strings = [str[1:len(str)-3].replace('/', '.') for str in test_file_strings]
suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str
in module_strings]
testSuite = unittest.TestSuite(suites)
run = unittest.TextTestRunner(verbosity=2).run(testSuite)
if close:
exit(len(run.failures) > 0 or len(run.errors) > 0)
return run
if __name__ == '__main__':
import sys
arg = sys.argv[1]
if arg.lower() == 'test':
test(True)
else:
raise RuntimeError('Unknown argument: %s' % arg)
|
3d774d8422b660d18f740f3ff3cadc1683d1c420
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.8",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.9",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
Increment version to reflect change
|
Increment version to reflect change
|
Python
|
bsd-3-clause
|
Rethought/tagman,Rethought/tagman
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.8",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
Increment version to reflect change
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.9",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.8",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
<commit_msg>Increment version to reflect change<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.9",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.8",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
Increment version to reflect change#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.9",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.8",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
<commit_msg>Increment version to reflect change<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name = "tagman",
version = "0.1.9",
author = "ReThought Ltd",
author_email = "matthew@rethought-solutions.com",
url = "https://github.com/Rethought/tagman.git",
packages = find_packages('src'),
package_dir = {'':'src'},
license = "BSD",
keywords = "django, tagging, tagman",
description = "Curated tagging app for Django",
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
|
0f1c65fd5cf3d9be85030dbcb64dfa51284f9b77
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
dependency_links=[
'http://github.com/tempodb/requests/tarball/development#egg=requests-0.11.1ssl'
],
install_requires=[
'python-dateutil==1.5',
'requests==0.11.1ssl',
'simplejson',
]
)
|
Use a custom version of the requests package to default to SSLv3
|
Use a custom version of the requests package to default to SSLv3
|
Python
|
mit
|
TempoIQ/tempoiq-python,tempodb/tempodb-python,tempodb/tempodb-python,mrgaaron/tempoiq-python
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
Use a custom version of the requests package to default to SSLv3
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
dependency_links=[
'http://github.com/tempodb/requests/tarball/development#egg=requests-0.11.1ssl'
],
install_requires=[
'python-dateutil==1.5',
'requests==0.11.1ssl',
'simplejson',
]
)
|
<commit_before>#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
<commit_msg>Use a custom version of the requests package to default to SSLv3<commit_after>
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
dependency_links=[
'http://github.com/tempodb/requests/tarball/development#egg=requests-0.11.1ssl'
],
install_requires=[
'python-dateutil==1.5',
'requests==0.11.1ssl',
'simplejson',
]
)
|
#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
Use a custom version of the requests package to default to SSLv3#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
dependency_links=[
'http://github.com/tempodb/requests/tarball/development#egg=requests-0.11.1ssl'
],
install_requires=[
'python-dateutil==1.5',
'requests==0.11.1ssl',
'simplejson',
]
)
|
<commit_before>#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
install_requires=[
'python-dateutil==1.5',
'requests',
'simplejson',
]
)
<commit_msg>Use a custom version of the requests package to default to SSLv3<commit_after>#!/usr/bin/env python
# encoding: utf-8
"""
tempodb/setup.py
Copyright (c) 2012 TempoDB Inc. All rights reserved.
"""
import os
from setuptools import setup
def get_version(version_tuple):
version = '%s.%s' % (version_tuple[0], version_tuple[1])
if version_tuple[2]:
version = '%s.%s' % (version, version_tuple[2])
return version
# Dirty hack to get version number from tempodb/__init__.py - we can't
# import it as it depends on dateutil, requests, and simplejson which aren't
# installed until this file is read
init = os.path.join(os.path.dirname(__file__), 'tempodb', '__init__.py')
version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0]
VERSION = get_version(eval(version_line.split('=')[-1]))
setup(
name="tempodb",
version=VERSION,
author="TempoDB Inc",
author_email="dev@tempo-db.com",
description="A client for the TempoDB API",
packages=["tempodb"],
long_description="A client for the TempoDB API.",
dependency_links=[
'http://github.com/tempodb/requests/tarball/development#egg=requests-0.11.1ssl'
],
install_requires=[
'python-dateutil==1.5',
'requests==0.11.1ssl',
'simplejson',
]
)
|
44238902398806ae237e53b14cf836562719a361
|
setup.py
|
setup.py
|
import os
from setuptools import setup
import sys
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(read("README.rst"), read("CHANGELOG.md")),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
|
import os
import sys
from setuptools import setup
from m2r import convert
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(
read("README.rst"), convert(read("CHANGELOG.md"))
),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
|
Use m2r to convert the changelog to rst
|
chore: Use m2r to convert the changelog to rst
|
Python
|
mit
|
jaysonsantos/python-binary-memcached,jaysonsantos/python-binary-memcached
|
import os
from setuptools import setup
import sys
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(read("README.rst"), read("CHANGELOG.md")),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
chore: Use m2r to convert the changelog to rst
|
import os
import sys
from setuptools import setup
from m2r import convert
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(
read("README.rst"), convert(read("CHANGELOG.md"))
),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
|
<commit_before>import os
from setuptools import setup
import sys
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(read("README.rst"), read("CHANGELOG.md")),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
<commit_msg>chore: Use m2r to convert the changelog to rst<commit_after>
|
import os
import sys
from setuptools import setup
from m2r import convert
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(
read("README.rst"), convert(read("CHANGELOG.md"))
),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
|
import os
from setuptools import setup
import sys
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(read("README.rst"), read("CHANGELOG.md")),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
chore: Use m2r to convert the changelog to rstimport os
import sys
from setuptools import setup
from m2r import convert
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(
read("README.rst"), convert(read("CHANGELOG.md"))
),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
|
<commit_before>import os
from setuptools import setup
import sys
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(read("README.rst"), read("CHANGELOG.md")),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
<commit_msg>chore: Use m2r to convert the changelog to rst<commit_after>import os
import sys
from setuptools import setup
from m2r import convert
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
version_dependant_requirements = [
"uhashring < 2" if sys.version_info < (3, 6) else "uhashring", # It uses f-strings
]
setup(
name="python-binary-memcached",
version="0.30.1",
author="Jayson Reis",
author_email="santosdosreis@gmail.com",
description="A pure python module to access memcached via its binary protocol with SASL auth support",
long_description="{0}\n{1}".format(
read("README.rst"), convert(read("CHANGELOG.md"))
),
url="https://github.com/jaysonsantos/python-binary-memcached",
packages=["bmemcached", "bmemcached.client"],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=[
"six",
]
+ version_dependant_requirements,
)
|
529db5b62f87506dcf0b3cb87447c6ed4d9862ab
|
setup.py
|
setup.py
|
from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.2",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
|
from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.3",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
|
Bump version to 0.0.3 for pip.
|
Bump version to 0.0.3 for pip.
|
Python
|
mit
|
sleibman/python-conduit,sleibman/python-conduit
|
from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.2",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
Bump version to 0.0.3 for pip.
|
from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.3",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
|
<commit_before>from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.2",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
<commit_msg>Bump version to 0.0.3 for pip.<commit_after>
|
from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.3",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
|
from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.2",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
Bump version to 0.0.3 for pip.from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.3",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
|
<commit_before>from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.2",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
<commit_msg>Bump version to 0.0.3 for pip.<commit_after>from setuptools import setup
import os
setup(
name = "conduit",
version = "0.0.3",
author = "Steve Leibman",
author_email = "sleibman@alum.mit.edu",
description = ("Framework for dataflow-style python programming"),
license = "MIT",
keywords = "dataflow distributed pipe flow programming",
url = "https://github.com/sleibman/python-conduit",
packages = ['conduit',
'conduit.util',
'conduit.test'],
long_description = """
python-conduit
==============
Python framework for dataflow-style programs.
Users of this framework structure their code into blocks with named inputs and outputs that are connected
by channels. A typical application will have one or more data generator/importer blocks which then pass
their data through various blocks which apply filters or tranforms to operate on the data.
For other similar projects, see: http://wiki.python.org/moin/FlowBasedProgramming
License
-------
conduit is free software and is released under the terms
of the MIT license (<http://opensource.org/licenses/mit-license.php>),
as specified in the accompanying LICENSE.txt file.
""",
test_suite = 'conduit.test',
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
],
)
|
0406ddcb3e22f8f3eb3b1fdba702e41ebe8b5bf0
|
connector/tests/__init__.py
|
connector/tests/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
fast_suite = [
]
checks = [
test_session,
test_event,
test_job,
test_queue,
test_worker,
test_backend,
test_producer,
test_connector,
test_mapper,
test_related_action,
]
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
|
Remove deprecated fast_suite and check list for unit tests
|
Remove deprecated fast_suite and check list for unit tests
|
Python
|
agpl-3.0
|
OCA/connector,OCA/connector
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
fast_suite = [
]
checks = [
test_session,
test_event,
test_job,
test_queue,
test_worker,
test_backend,
test_producer,
test_connector,
test_mapper,
test_related_action,
]
Remove deprecated fast_suite and check list for unit tests
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
fast_suite = [
]
checks = [
test_session,
test_event,
test_job,
test_queue,
test_worker,
test_backend,
test_producer,
test_connector,
test_mapper,
test_related_action,
]
<commit_msg>Remove deprecated fast_suite and check list for unit tests<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
fast_suite = [
]
checks = [
test_session,
test_event,
test_job,
test_queue,
test_worker,
test_backend,
test_producer,
test_connector,
test_mapper,
test_related_action,
]
Remove deprecated fast_suite and check list for unit tests# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
fast_suite = [
]
checks = [
test_session,
test_event,
test_job,
test_queue,
test_worker,
test_backend,
test_producer,
test_connector,
test_mapper,
test_related_action,
]
<commit_msg>Remove deprecated fast_suite and check list for unit tests<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_session
from . import test_event
from . import test_job
from . import test_queue
from . import test_worker
from . import test_backend
from . import test_producer
from . import test_connector
from . import test_mapper
from . import test_related_action
|
9dafef749aaf2fca9e865cf28b043ea22bafe3a5
|
backend/django/apps/accounts/tests.py
|
backend/django/apps/accounts/tests.py
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_create_user(self):
self.user.email = 'john@email.com'
data = json.dumps(WholeAccountSerializer(self.user).data)
response = self.client.post(
reverse('_accounts:account-list'),
data,
content_type='application/json')
self.assertEqual(
first=response.status_code, second=status.HTTP_201_CREATED)
|
Create a test for Account creation
|
Create a test for Account creation
|
Python
|
mit
|
slavpetroff/sweetshop,slavpetroff/sweetshop
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
Create a test for Account creation
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_create_user(self):
self.user.email = 'john@email.com'
data = json.dumps(WholeAccountSerializer(self.user).data)
response = self.client.post(
reverse('_accounts:account-list'),
data,
content_type='application/json')
self.assertEqual(
first=response.status_code, second=status.HTTP_201_CREATED)
|
<commit_before>from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
<commit_msg>Create a test for Account creation<commit_after>
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_create_user(self):
self.user.email = 'john@email.com'
data = json.dumps(WholeAccountSerializer(self.user).data)
response = self.client.post(
reverse('_accounts:account-list'),
data,
content_type='application/json')
self.assertEqual(
first=response.status_code, second=status.HTTP_201_CREATED)
|
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
Create a test for Account creationfrom django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_create_user(self):
self.user.email = 'john@email.com'
data = json.dumps(WholeAccountSerializer(self.user).data)
response = self.client.post(
reverse('_accounts:account-list'),
data,
content_type='application/json')
self.assertEqual(
first=response.status_code, second=status.HTTP_201_CREATED)
|
<commit_before>from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
<commit_msg>Create a test for Account creation<commit_after>from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
import factory
import json
from .models import BaseAccount
from .serializers import WholeAccountSerializer
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = BaseAccount
first_name = 'John'
last_name = 'Doe'
email = '{}.{}@email.com'.format(first_name, last_name)
password = 'passjohn1'
class FactoryBoyCreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_can_create_user(self):
response = self.client.get(
reverse('_accounts:account-detail', kwargs={'pk': 1}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertJSONEqual(
raw=json.dumps(response.data),
expected_data=WholeAccountSerializer(self.user).data)
class CreateUserTest(APITestCase):
def setUp(self):
self.user = UserFactory()
def test_create_user(self):
self.user.email = 'john@email.com'
data = json.dumps(WholeAccountSerializer(self.user).data)
response = self.client.post(
reverse('_accounts:account-list'),
data,
content_type='application/json')
self.assertEqual(
first=response.status_code, second=status.HTTP_201_CREATED)
|
994b9fbc9372b0c54f840a239f8b4a1cc89315ee
|
src/waldur_mastermind/invoices/filters.py
|
src/waldur_mastermind/invoices/filters.py
|
import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
|
import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
o = django_filters.OrderingFilter(fields=(('year', 'month'),))
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
|
Allow to filter invoices by date
|
Allow to filter invoices by date [WAL-2340]
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur
|
import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
Allow to filter invoices by date [WAL-2340]
|
import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
o = django_filters.OrderingFilter(fields=(('year', 'month'),))
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
|
<commit_before>import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
<commit_msg>Allow to filter invoices by date [WAL-2340]<commit_after>
|
import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
o = django_filters.OrderingFilter(fields=(('year', 'month'),))
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
|
import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
Allow to filter invoices by date [WAL-2340]import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
o = django_filters.OrderingFilter(fields=(('year', 'month'),))
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
|
<commit_before>import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
<commit_msg>Allow to filter invoices by date [WAL-2340]<commit_after>import django_filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
o = django_filters.OrderingFilter(fields=(('year', 'month'),))
class Meta(object):
model = models.Invoice
fields = ('year', 'month')
|
f25814cd2a91cb183e6cdae4a4597534dc8de17e
|
codesearch/paths.py
|
codesearch/paths.py
|
# Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise IOError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise Exception("Can't determine package root")
source_root = new_package_root
|
# Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
class NoSourceRootError(Exception):
"""Exception raise when the CodeSearch library can't determine the location
of the local Chromium checkout."""
pass
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise NoSourceRootError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise NoSourceRootError("Can't determine package root")
source_root = new_package_root
|
Raise a more specific exception when the source root cannot be found.
|
Raise a more specific exception when the source root cannot be found.
|
Python
|
bsd-3-clause
|
chromium/codesearch-py,chromium/codesearch-py
|
# Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise IOError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise Exception("Can't determine package root")
source_root = new_package_root
Raise a more specific exception when the source root cannot be found.
|
# Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
class NoSourceRootError(Exception):
"""Exception raise when the CodeSearch library can't determine the location
of the local Chromium checkout."""
pass
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise NoSourceRootError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise NoSourceRootError("Can't determine package root")
source_root = new_package_root
|
<commit_before># Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise IOError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise Exception("Can't determine package root")
source_root = new_package_root
<commit_msg>Raise a more specific exception when the source root cannot be found.<commit_after>
|
# Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
class NoSourceRootError(Exception):
"""Exception raise when the CodeSearch library can't determine the location
of the local Chromium checkout."""
pass
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise NoSourceRootError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise NoSourceRootError("Can't determine package root")
source_root = new_package_root
|
# Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise IOError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise Exception("Can't determine package root")
source_root = new_package_root
Raise a more specific exception when the source root cannot be found.# Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
class NoSourceRootError(Exception):
"""Exception raise when the CodeSearch library can't determine the location
of the local Chromium checkout."""
pass
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise NoSourceRootError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise NoSourceRootError("Can't determine package root")
source_root = new_package_root
|
<commit_before># Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise IOError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise Exception("Can't determine package root")
source_root = new_package_root
<commit_msg>Raise a more specific exception when the source root cannot be found.<commit_after># Copyright 2017 The Chromium Authors.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd.
import os
class NoSourceRootError(Exception):
"""Exception raise when the CodeSearch library can't determine the location
of the local Chromium checkout."""
pass
def GetPackageRelativePath(filename):
"""GetPackageRelativePath returns the path to |filename| relative to the root
of the package as determined by GetSourceRoot()."""
return os.path.relpath(filename, GetSourceRoot(filename)).replace('\\', '/')
def GetSourceRoot(filename):
"""Try to determine the root of the package which contains |filename|.
The current heuristic attempts to determine the root of the Chromium source
tree by searching up the directory hierarchy until we find a directory
containing src/.gn.
"""
# If filename is not absolute, then we are going to assume that it is
# relative to the current directory.
if not os.path.isabs(filename):
filename = os.path.abspath(filename)
if not os.path.exists(filename):
raise NoSourceRootError('File not found: {}'.format(filename))
source_root = os.path.dirname(filename)
while True:
gnfile = os.path.join(source_root, 'src', '.gn')
if os.path.exists(gnfile):
return source_root
new_package_root = os.path.dirname(source_root)
if new_package_root == source_root:
raise NoSourceRootError("Can't determine package root")
source_root = new_package_root
|
7e153f0cb35a3572a724c29f3be26bf6254d632b
|
client/views.py
|
client/views.py
|
from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
|
from django.shortcuts import render, render_to_response, HttpResponseRedirect
from django.http import HttpResponse, Http404
from .models import Message, MessageForm
from django.contrib.auth.decorators import login_required
import datetime
# Create your views here.
@login_required
def chatroom(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = MessageForm(request.POST)
# check whether it's valid:
if form.is_valid():
# process the data in form.cleaned_data as required
new_message = form.save(commit=False)
new_message.date = datetime.datetime.now()
new_message.username = request.user.username
new_message.save()
return HttpResponseRedirect('/chat/')
# if a GET (or any other method) we'll create a blank form
form = MessageForm()
messages = Message.objects.all().order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', {'form': form, 'context': context})
|
Update chatroom view to process new messages
|
Update chatroom view to process new messages
|
Python
|
apache-2.0
|
jason-feng/chatroom,jason-feng/chatroom,jason-feng/chatroom,jason-feng/chatroom
|
from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
Update chatroom view to process new messages
|
from django.shortcuts import render, render_to_response, HttpResponseRedirect
from django.http import HttpResponse, Http404
from .models import Message, MessageForm
from django.contrib.auth.decorators import login_required
import datetime
# Create your views here.
@login_required
def chatroom(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = MessageForm(request.POST)
# check whether it's valid:
if form.is_valid():
# process the data in form.cleaned_data as required
new_message = form.save(commit=False)
new_message.date = datetime.datetime.now()
new_message.username = request.user.username
new_message.save()
return HttpResponseRedirect('/chat/')
# if a GET (or any other method) we'll create a blank form
form = MessageForm()
messages = Message.objects.all().order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', {'form': form, 'context': context})
|
<commit_before>from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
<commit_msg>Update chatroom view to process new messages<commit_after>
|
from django.shortcuts import render, render_to_response, HttpResponseRedirect
from django.http import HttpResponse, Http404
from .models import Message, MessageForm
from django.contrib.auth.decorators import login_required
import datetime
# Create your views here.
@login_required
def chatroom(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = MessageForm(request.POST)
# check whether it's valid:
if form.is_valid():
# process the data in form.cleaned_data as required
new_message = form.save(commit=False)
new_message.date = datetime.datetime.now()
new_message.username = request.user.username
new_message.save()
return HttpResponseRedirect('/chat/')
# if a GET (or any other method) we'll create a blank form
form = MessageForm()
messages = Message.objects.all().order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', {'form': form, 'context': context})
|
from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
Update chatroom view to process new messagesfrom django.shortcuts import render, render_to_response, HttpResponseRedirect
from django.http import HttpResponse, Http404
from .models import Message, MessageForm
from django.contrib.auth.decorators import login_required
import datetime
# Create your views here.
@login_required
def chatroom(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = MessageForm(request.POST)
# check whether it's valid:
if form.is_valid():
# process the data in form.cleaned_data as required
new_message = form.save(commit=False)
new_message.date = datetime.datetime.now()
new_message.username = request.user.username
new_message.save()
return HttpResponseRedirect('/chat/')
# if a GET (or any other method) we'll create a blank form
form = MessageForm()
messages = Message.objects.all().order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', {'form': form, 'context': context})
|
<commit_before>from django.shortcuts import render
from django.http import HttpResponse, Http404
from .models import Message
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def chatroom(request):
messages = Message.objects.order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', context)
<commit_msg>Update chatroom view to process new messages<commit_after>from django.shortcuts import render, render_to_response, HttpResponseRedirect
from django.http import HttpResponse, Http404
from .models import Message, MessageForm
from django.contrib.auth.decorators import login_required
import datetime
# Create your views here.
@login_required
def chatroom(request):
# if this is a POST request we need to process the form data
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = MessageForm(request.POST)
# check whether it's valid:
if form.is_valid():
# process the data in form.cleaned_data as required
new_message = form.save(commit=False)
new_message.date = datetime.datetime.now()
new_message.username = request.user.username
new_message.save()
return HttpResponseRedirect('/chat/')
# if a GET (or any other method) we'll create a blank form
form = MessageForm()
messages = Message.objects.all().order_by('date')
context = {'messages': messages}
return render(request, 'client/index.html', {'form': form, 'context': context})
|
59a717588c9f0e76d532516a0c38624042527291
|
testing/plot_test_data.py
|
testing/plot_test_data.py
|
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, payload_parser.handle_message)
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
|
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol, MessageFrameParser
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
message_parser = MessageFrameParser(payload_parser.handle_message)
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, [message_parser.parse_data])
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
|
Fix test data plotting to use the changed interfaces
|
Fix test data plotting to use the changed interfaces
|
Python
|
bsd-2-clause
|
jpaalasm/zephyr-bt
|
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, payload_parser.handle_message)
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
Fix test data plotting to use the changed interfaces
|
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol, MessageFrameParser
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
message_parser = MessageFrameParser(payload_parser.handle_message)
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, [message_parser.parse_data])
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
|
<commit_before>
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, payload_parser.handle_message)
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
<commit_msg>Fix test data plotting to use the changed interfaces<commit_after>
|
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol, MessageFrameParser
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
message_parser = MessageFrameParser(payload_parser.handle_message)
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, [message_parser.parse_data])
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
|
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, payload_parser.handle_message)
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
Fix test data plotting to use the changed interfaces
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol, MessageFrameParser
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
message_parser = MessageFrameParser(payload_parser.handle_message)
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, [message_parser.parse_data])
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
|
<commit_before>
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, payload_parser.handle_message)
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
<commit_msg>Fix test data plotting to use the changed interfaces<commit_after>
import zephyr.util
from zephyr.collector import MeasurementCollector
from zephyr.bioharness import BioHarnessSignalAnalysis, BioHarnessPacketHandler
from zephyr.message import MessagePayloadParser
from zephyr.testing import visualize_measurements, test_data_dir, VirtualSerial
from zephyr.protocol import Protocol, MessageFrameParser
def main():
zephyr.util.DISABLE_CLOCK_DIFFERENCE_ESTIMATION = True
collector = MeasurementCollector()
rr_signal_analysis = BioHarnessSignalAnalysis([], [collector.handle_event])
signal_packet_handlers = [collector.handle_signal, rr_signal_analysis.handle_signal]
signal_packet_handler = BioHarnessPacketHandler(signal_packet_handlers, [collector.handle_event])
payload_parser = MessagePayloadParser([signal_packet_handler.handle_packet])
message_parser = MessageFrameParser(payload_parser.handle_message)
ser = VirtualSerial(test_data_dir + "/120-second-bt-stream.dat")
protocol = Protocol(ser, [message_parser.parse_data])
try:
protocol.run()
except EOFError:
pass
visualize_measurements(collector)
if __name__ == "__main__":
main()
|
19280ac68748cb5cd2cb439edeb667f581840604
|
tests/test_http_client.py
|
tests/test_http_client.py
|
import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}')
def completion(payload, error):
assert payload['data'] == [1, 2, 3]
assert payload['data'] != [3, 2, 1]
client = HttpClient('123123')
client.submit_request('/users/123', 'GET', None, completion)
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
def completion(payload, error):
if error is None:
assert payload['name'] == 'ben'
assert payload['age'] == 12
else:
raise
client = HttpClient('123123')
client.submit_request('users/', 'POST', None, completion)
|
import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}', status=200)
client = HttpClient('123123')
response, error = client.submit_request('/users/123',
'GET', None)
assert response['data'] == [1, 2, 3]
assert response['data'] != [3, 2, 1]
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
client = HttpClient('123123')
response, error = client.submit_request('users/',
'POST', None)
if error is None:
assert response['name'] == 'ben'
assert response['age'] == 12
else:
raise
|
Update tests to remove completion blocks
|
Update tests to remove completion blocks
|
Python
|
mit
|
ben-cunningham/pybot,ben-cunningham/python-messenger-bot
|
import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}')
def completion(payload, error):
assert payload['data'] == [1, 2, 3]
assert payload['data'] != [3, 2, 1]
client = HttpClient('123123')
client.submit_request('/users/123', 'GET', None, completion)
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
def completion(payload, error):
if error is None:
assert payload['name'] == 'ben'
assert payload['age'] == 12
else:
raise
client = HttpClient('123123')
client.submit_request('users/', 'POST', None, completion)
Update tests to remove completion blocks
|
import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}', status=200)
client = HttpClient('123123')
response, error = client.submit_request('/users/123',
'GET', None)
assert response['data'] == [1, 2, 3]
assert response['data'] != [3, 2, 1]
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
client = HttpClient('123123')
response, error = client.submit_request('users/',
'POST', None)
if error is None:
assert response['name'] == 'ben'
assert response['age'] == 12
else:
raise
|
<commit_before>import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}')
def completion(payload, error):
assert payload['data'] == [1, 2, 3]
assert payload['data'] != [3, 2, 1]
client = HttpClient('123123')
client.submit_request('/users/123', 'GET', None, completion)
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
def completion(payload, error):
if error is None:
assert payload['name'] == 'ben'
assert payload['age'] == 12
else:
raise
client = HttpClient('123123')
client.submit_request('users/', 'POST', None, completion)
<commit_msg>Update tests to remove completion blocks<commit_after>
|
import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}', status=200)
client = HttpClient('123123')
response, error = client.submit_request('/users/123',
'GET', None)
assert response['data'] == [1, 2, 3]
assert response['data'] != [3, 2, 1]
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
client = HttpClient('123123')
response, error = client.submit_request('users/',
'POST', None)
if error is None:
assert response['name'] == 'ben'
assert response['age'] == 12
else:
raise
|
import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}')
def completion(payload, error):
assert payload['data'] == [1, 2, 3]
assert payload['data'] != [3, 2, 1]
client = HttpClient('123123')
client.submit_request('/users/123', 'GET', None, completion)
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
def completion(payload, error):
if error is None:
assert payload['name'] == 'ben'
assert payload['age'] == 12
else:
raise
client = HttpClient('123123')
client.submit_request('users/', 'POST', None, completion)
Update tests to remove completion blocksimport unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}', status=200)
client = HttpClient('123123')
response, error = client.submit_request('/users/123',
'GET', None)
assert response['data'] == [1, 2, 3]
assert response['data'] != [3, 2, 1]
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
client = HttpClient('123123')
response, error = client.submit_request('users/',
'POST', None)
if error is None:
assert response['name'] == 'ben'
assert response['age'] == 12
else:
raise
|
<commit_before>import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}')
def completion(payload, error):
assert payload['data'] == [1, 2, 3]
assert payload['data'] != [3, 2, 1]
client = HttpClient('123123')
client.submit_request('/users/123', 'GET', None, completion)
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
def completion(payload, error):
if error is None:
assert payload['name'] == 'ben'
assert payload['age'] == 12
else:
raise
client = HttpClient('123123')
client.submit_request('users/', 'POST', None, completion)
<commit_msg>Update tests to remove completion blocks<commit_after>import unittest
import httpretty
from fbmsgbot.http_client import HttpClient
from fbmsgbot.resources.urls import FACEBOOK_MESSAGES_POST_URL
class TestHttpClient(unittest.TestCase):
"""
Test the HttpClient
"""
@httpretty.activate
def test_submit_GET_request(self):
httpretty.register_uri(httpretty.GET,
FACEBOOK_MESSAGES_POST_URL + '/users/123',
body='{ \
"data" : [1,2,3] \
}', status=200)
client = HttpClient('123123')
response, error = client.submit_request('/users/123',
'GET', None)
assert response['data'] == [1, 2, 3]
assert response['data'] != [3, 2, 1]
@httpretty.activate
def test_submite_POST_request(self):
httpretty.register_uri(httpretty.POST,
FACEBOOK_MESSAGES_POST_URL + 'users/',
body='{ \
"name": "ben", \
"age": 12 \
}', status=201)
client = HttpClient('123123')
response, error = client.submit_request('users/',
'POST', None)
if error is None:
assert response['name'] == 'ben'
assert response['age'] == 12
else:
raise
|
01382a617d075b468ea8a08087f298da5c55a46c
|
kolibri/core/bookmarks/models.py
|
kolibri/core/bookmarks/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
if self.user_id:
return self.cached_related_dataset_lookup("user")
elif self.dataset_id:
# confirm that there exists a facility with that dataset_id
try:
return Facility.objects.get(dataset_id=self.dataset_id).dataset_id
except Facility.DoesNotExist:
pass
# if no user or matching facility, infer dataset from the default facility
facility = Facility.get_default_facility()
if not facility:
raise AssertionError(
"Before you can save bookmarks, you must have a facility"
)
return facility.dataset_id
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
return self.cached_related_dataset_lookup("user")
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
|
Remove unnecessary cruft from Bookmark.infer_dataset
|
Remove unnecessary cruft from Bookmark.infer_dataset
|
Python
|
mit
|
learningequality/kolibri,learningequality/kolibri,learningequality/kolibri,learningequality/kolibri
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
if self.user_id:
return self.cached_related_dataset_lookup("user")
elif self.dataset_id:
# confirm that there exists a facility with that dataset_id
try:
return Facility.objects.get(dataset_id=self.dataset_id).dataset_id
except Facility.DoesNotExist:
pass
# if no user or matching facility, infer dataset from the default facility
facility = Facility.get_default_facility()
if not facility:
raise AssertionError(
"Before you can save bookmarks, you must have a facility"
)
return facility.dataset_id
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
Remove unnecessary cruft from Bookmark.infer_dataset
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
return self.cached_related_dataset_lookup("user")
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
if self.user_id:
return self.cached_related_dataset_lookup("user")
elif self.dataset_id:
# confirm that there exists a facility with that dataset_id
try:
return Facility.objects.get(dataset_id=self.dataset_id).dataset_id
except Facility.DoesNotExist:
pass
# if no user or matching facility, infer dataset from the default facility
facility = Facility.get_default_facility()
if not facility:
raise AssertionError(
"Before you can save bookmarks, you must have a facility"
)
return facility.dataset_id
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
<commit_msg>Remove unnecessary cruft from Bookmark.infer_dataset<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
return self.cached_related_dataset_lookup("user")
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
if self.user_id:
return self.cached_related_dataset_lookup("user")
elif self.dataset_id:
# confirm that there exists a facility with that dataset_id
try:
return Facility.objects.get(dataset_id=self.dataset_id).dataset_id
except Facility.DoesNotExist:
pass
# if no user or matching facility, infer dataset from the default facility
facility = Facility.get_default_facility()
if not facility:
raise AssertionError(
"Before you can save bookmarks, you must have a facility"
)
return facility.dataset_id
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
Remove unnecessary cruft from Bookmark.infer_dataset# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
return self.cached_related_dataset_lookup("user")
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
if self.user_id:
return self.cached_related_dataset_lookup("user")
elif self.dataset_id:
# confirm that there exists a facility with that dataset_id
try:
return Facility.objects.get(dataset_id=self.dataset_id).dataset_id
except Facility.DoesNotExist:
pass
# if no user or matching facility, infer dataset from the default facility
facility = Facility.get_default_facility()
if not facility:
raise AssertionError(
"Before you can save bookmarks, you must have a facility"
)
return facility.dataset_id
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
<commit_msg>Remove unnecessary cruft from Bookmark.infer_dataset<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from morango.models import UUIDField
from kolibri.core.auth.models import AbstractFacilityDataModel
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.general import IsOwn
class Bookmark(AbstractFacilityDataModel):
content_id = UUIDField(blank=True, null=True)
channel_id = UUIDField(blank=True, null=True)
contentnode_id = UUIDField()
user = models.ForeignKey(FacilityUser, blank=False)
created = models.DateTimeField(default=timezone.now, db_index=True)
morango_model_name = "bookmark"
permissions = IsOwn()
def infer_dataset(self, *args, **kwargs):
return self.cached_related_dataset_lookup("user")
def calculate_partition(self):
return "{dataset_id}:user-rw:{user_id}".format(
dataset_id=self.dataset_id, user_id=self.user.id
)
class Meta:
# Ensures that we do not save duplicates, otherwise raises a
# django.db.utils.IntegrityError
unique_together = (
"user",
"contentnode_id",
)
|
40aa16d48c58a17ab08ac526e1a8806214167a1b
|
carnifex/test/integration/test_local_process.py
|
carnifex/test/integration/test_local_process.py
|
from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
|
from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
def test_real_run_unknown_command(self):
executable = 'thiscommandshouldnotexist'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable)
@result.addCallback
def check_result(result):
stdout, stderr, code = result
self.assertEqual(stdout, '')
self.assertNotEqual(stderr, '')
self.assertNotEqual(code, 0)
return result
def test_getExitStatus_false(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('false')
@result.addCallback
def check_result(result):
self.assertNotEqual(result, 0, "The 'false' command should "
"exit with a nonzero code")
return result
def test_getExitStatus_true(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('true')
@result.addCallback
def check_result(result):
self.assertEqual(result, 0, "The 'true' command should "
"exit with code 0")
return result
|
Add more tests to the local process integration test
|
Add more tests to the local process integration test
|
Python
|
mit
|
sporsh/carnifex
|
from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
Add more tests to the local process integration test
|
from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
def test_real_run_unknown_command(self):
executable = 'thiscommandshouldnotexist'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable)
@result.addCallback
def check_result(result):
stdout, stderr, code = result
self.assertEqual(stdout, '')
self.assertNotEqual(stderr, '')
self.assertNotEqual(code, 0)
return result
def test_getExitStatus_false(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('false')
@result.addCallback
def check_result(result):
self.assertNotEqual(result, 0, "The 'false' command should "
"exit with a nonzero code")
return result
def test_getExitStatus_true(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('true')
@result.addCallback
def check_result(result):
self.assertEqual(result, 0, "The 'true' command should "
"exit with code 0")
return result
|
<commit_before>from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
<commit_msg>Add more tests to the local process integration test<commit_after>
|
from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
def test_real_run_unknown_command(self):
executable = 'thiscommandshouldnotexist'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable)
@result.addCallback
def check_result(result):
stdout, stderr, code = result
self.assertEqual(stdout, '')
self.assertNotEqual(stderr, '')
self.assertNotEqual(code, 0)
return result
def test_getExitStatus_false(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('false')
@result.addCallback
def check_result(result):
self.assertNotEqual(result, 0, "The 'false' command should "
"exit with a nonzero code")
return result
def test_getExitStatus_true(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('true')
@result.addCallback
def check_result(result):
self.assertEqual(result, 0, "The 'true' command should "
"exit with code 0")
return result
|
from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
Add more tests to the local process integration testfrom twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
def test_real_run_unknown_command(self):
executable = 'thiscommandshouldnotexist'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable)
@result.addCallback
def check_result(result):
stdout, stderr, code = result
self.assertEqual(stdout, '')
self.assertNotEqual(stderr, '')
self.assertNotEqual(code, 0)
return result
def test_getExitStatus_false(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('false')
@result.addCallback
def check_result(result):
self.assertNotEqual(result, 0, "The 'false' command should "
"exit with a nonzero code")
return result
def test_getExitStatus_true(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('true')
@result.addCallback
def check_result(result):
self.assertEqual(result, 0, "The 'true' command should "
"exit with code 0")
return result
|
<commit_before>from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
<commit_msg>Add more tests to the local process integration test<commit_after>from twisted.trial.unittest import TestCase
from carnifex.localprocess import LocalProcessInductor
from twisted.internet import reactor
class InductorTest(TestCase):
def test_real_run(self):
executable = 'echo'
echo_text = "hello world!"
expected_stdout = echo_text + '\n'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable, args=(executable, echo_text))
@result.addCallback
def check_result(result):
expected_result = (expected_stdout, '', 0)
self.assertEqual(result, expected_result)
return result
def test_real_run_unknown_command(self):
executable = 'thiscommandshouldnotexist'
inductor = LocalProcessInductor(reactor)
result = inductor.run(executable)
@result.addCallback
def check_result(result):
stdout, stderr, code = result
self.assertEqual(stdout, '')
self.assertNotEqual(stderr, '')
self.assertNotEqual(code, 0)
return result
def test_getExitStatus_false(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('false')
@result.addCallback
def check_result(result):
self.assertNotEqual(result, 0, "The 'false' command should "
"exit with a nonzero code")
return result
def test_getExitStatus_true(self):
inductor = LocalProcessInductor(reactor)
result = inductor.getExitStatus('true')
@result.addCallback
def check_result(result):
self.assertEqual(result, 0, "The 'true' command should "
"exit with code 0")
return result
|
411517f7130b3f40b589682dfb0aef63b0df609a
|
document/api.py
|
document/api.py
|
from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
|
from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'title', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
|
Add dossier title to API
|
Add dossier title to API
|
Python
|
mit
|
openkamer/openkamer,openkamer/openkamer,openkamer/openkamer,openkamer/openkamer
|
from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
Add dossier title to API
|
from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'title', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
|
<commit_before>from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
<commit_msg>Add dossier title to API<commit_after>
|
from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'title', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
|
from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
Add dossier title to APIfrom rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'title', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
|
<commit_before>from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
<commit_msg>Add dossier title to API<commit_after>from rest_framework import serializers, viewsets
from document.models import Document, Kamerstuk, Dossier
class DossierSerializer(serializers.HyperlinkedModelSerializer):
documents = serializers.HyperlinkedRelatedField(read_only=True,
view_name='document-detail',
many=True)
class Meta:
model = Dossier
fields = ('id', 'dossier_id', 'title', 'documents')
class DossierViewSet(viewsets.ModelViewSet):
queryset = Dossier.objects.all()
serializer_class = DossierSerializer
class DocumentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Document
fields = ('id', 'dossier', 'raw_type', 'raw_title', 'publisher', 'date_published', 'document_url')
class DocumentViewSet(viewsets.ModelViewSet):
queryset = Document.objects.all()
serializer_class = DocumentSerializer
class KamerstukSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Kamerstuk
fields = ('id', 'document', 'id_main', 'id_sub', 'type_short', 'type_long')
class KamerstukViewSet(viewsets.ModelViewSet):
queryset = Kamerstuk.objects.all()
serializer_class = KamerstukSerializer
|
446da2ceffb49fe694026c3e8d3c7f24cdcc4215
|
tests.py
|
tests.py
|
"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import unittest
from colour_runner import runner as crunner
# from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
pass
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
|
"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import sys
import unittest
from io import StringIO
from colour_runner import runner as crunner
from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
try:
out = StringIO()
sys.stdout = out
logger = Mann(console=True)
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, 'foo')
finally:
sys.stdout = sys.__stdout__
try:
out = StringIO()
sys.stdout = out
logger = Mann()
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, '')
finally:
sys.stdout = sys.__stdout__
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
|
Create test for console output.
|
Create test for console output.
|
Python
|
mit
|
mypleasureteam/mann
|
"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import unittest
from colour_runner import runner as crunner
# from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
pass
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
Create test for console output.
|
"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import sys
import unittest
from io import StringIO
from colour_runner import runner as crunner
from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
try:
out = StringIO()
sys.stdout = out
logger = Mann(console=True)
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, 'foo')
finally:
sys.stdout = sys.__stdout__
try:
out = StringIO()
sys.stdout = out
logger = Mann()
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, '')
finally:
sys.stdout = sys.__stdout__
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
|
<commit_before>"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import unittest
from colour_runner import runner as crunner
# from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
pass
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
<commit_msg>Create test for console output.<commit_after>
|
"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import sys
import unittest
from io import StringIO
from colour_runner import runner as crunner
from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
try:
out = StringIO()
sys.stdout = out
logger = Mann(console=True)
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, 'foo')
finally:
sys.stdout = sys.__stdout__
try:
out = StringIO()
sys.stdout = out
logger = Mann()
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, '')
finally:
sys.stdout = sys.__stdout__
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
|
"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import unittest
from colour_runner import runner as crunner
# from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
pass
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
Create test for console output."""Test suite for Mann."""
# -*- coding: utf-8 -*-
import sys
import unittest
from io import StringIO
from colour_runner import runner as crunner
from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
try:
out = StringIO()
sys.stdout = out
logger = Mann(console=True)
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, 'foo')
finally:
sys.stdout = sys.__stdout__
try:
out = StringIO()
sys.stdout = out
logger = Mann()
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, '')
finally:
sys.stdout = sys.__stdout__
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
|
<commit_before>"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import unittest
from colour_runner import runner as crunner
# from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
pass
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
<commit_msg>Create test for console output.<commit_after>"""Test suite for Mann."""
# -*- coding: utf-8 -*-
import sys
import unittest
from io import StringIO
from colour_runner import runner as crunner
from mypleasure.mann import Mann
class ConsoleTestCase(unittest.TestCase):
"""Test console logger."""
def runTest(self): # noqa
try:
out = StringIO()
sys.stdout = out
logger = Mann(console=True)
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, 'foo')
finally:
sys.stdout = sys.__stdout__
try:
out = StringIO()
sys.stdout = out
logger = Mann()
logger.log('foo')
output = out.getvalue().strip()
self.assertEqual(output, '')
finally:
sys.stdout = sys.__stdout__
def suite():
"""Compose and return test suite."""
suite = unittest.TestSuite()
suite.addTest(ConsoleTestCase())
return suite
if __name__ == '__main__':
runner = crunner.ColourTextTestRunner()
runner.run(suite())
|
b19951bcf2035c9e755ad731e4f5081cf5f0d46f
|
troposphere/codeguruprofiler.py
|
troposphere/codeguruprofiler.py
|
# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
|
# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
|
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes
|
# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
<commit_before># Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
<commit_msg>Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes<commit_after>
|
# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes# Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
<commit_before># Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'ProfilingGroupName': (basestring, True),
}
<commit_msg>Update AWS::CodeGuruProfiler::ProfilingGroup per 2020-06-03 changes<commit_after># Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
class ProfilingGroup(AWSObject):
resource_type = "AWS::CodeGuruProfiler::ProfilingGroup"
props = {
'AgentPermissions': (dict, False),
'ProfilingGroupName': (basestring, True),
}
|
22a0968d92ef81e021aeae5ab4fd724cc64a3f8c
|
saleor/site/utils.py
|
saleor/site/utils.py
|
from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
|
from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
def get_setting_value(request, key):
site_settings = get_site_settings(request)
return getattr(site_settings, key, None)
|
Define function for getting setting value by key
|
Define function for getting setting value by key
|
Python
|
bsd-3-clause
|
KenMutemi/saleor,maferelo/saleor,mociepka/saleor,HyperManTT/ECommerceSaleor,maferelo/saleor,jreigel/saleor,KenMutemi/saleor,itbabu/saleor,UITools/saleor,maferelo/saleor,itbabu/saleor,tfroehlich82/saleor,UITools/saleor,jreigel/saleor,car3oon/saleor,tfroehlich82/saleor,tfroehlich82/saleor,jreigel/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,KenMutemi/saleor,itbabu/saleor,HyperManTT/ECommerceSaleor,mociepka/saleor,car3oon/saleor,car3oon/saleor,HyperManTT/ECommerceSaleor,UITools/saleor
|
from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
Define function for getting setting value by key
|
from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
def get_setting_value(request, key):
site_settings = get_site_settings(request)
return getattr(site_settings, key, None)
|
<commit_before>from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
<commit_msg>Define function for getting setting value by key<commit_after>
|
from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
def get_setting_value(request, key):
site_settings = get_site_settings(request)
return getattr(site_settings, key, None)
|
from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
Define function for getting setting value by keyfrom django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
def get_setting_value(request, key):
site_settings = get_site_settings(request)
return getattr(site_settings, key, None)
|
<commit_before>from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
<commit_msg>Define function for getting setting value by key<commit_after>from django.conf import settings
from .models import SiteSetting
def get_site_settings(request):
if not hasattr(request, 'site_settings'):
site_settings_id = getattr(settings, 'SITE_SETTINGS_ID', None)
request.site_settings = get_site_settings_uncached(site_settings_id)
return request.site_settings
def get_site_settings_uncached(site_id=None):
return SiteSetting.objects.get(pk=site_id)
def get_setting_value(request, key):
site_settings = get_site_settings(request)
return getattr(site_settings, key, None)
|
7caa677b300340b62f999ed3733e95fb431da9d4
|
views.py
|
views.py
|
from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
return render(request, 'EXT_GEN/index.html', indexContext)
|
from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from django.middleware.csrf import rotate_token
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
rotate_token(request)
return render(request, 'EXT_GEN/index.html', indexContext)
|
Add rotation of CSRF token to prevent form resubmission
|
Add rotation of CSRF token to prevent form resubmission
|
Python
|
mit
|
cameronlai/EXT_GEN,cameronlai/EXT_GEN
|
from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
return render(request, 'EXT_GEN/index.html', indexContext)
Add rotation of CSRF token to prevent form resubmission
|
from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from django.middleware.csrf import rotate_token
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
rotate_token(request)
return render(request, 'EXT_GEN/index.html', indexContext)
|
<commit_before>from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
return render(request, 'EXT_GEN/index.html', indexContext)
<commit_msg>Add rotation of CSRF token to prevent form resubmission<commit_after>
|
from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from django.middleware.csrf import rotate_token
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
rotate_token(request)
return render(request, 'EXT_GEN/index.html', indexContext)
|
from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
return render(request, 'EXT_GEN/index.html', indexContext)
Add rotation of CSRF token to prevent form resubmissionfrom django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from django.middleware.csrf import rotate_token
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
rotate_token(request)
return render(request, 'EXT_GEN/index.html', indexContext)
|
<commit_before>from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
indexContext = {}
indexContext['fileReturnError'] = 'false'
events = 'events:[],'
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
print "form is valid or not = ", form.is_valid()
if form.is_valid():
myOptimizerParser = extGenOptimizer1()
events = myOptimizerParser.run(request.FILES)
# else:
# indexContext['fileReturnError'] = 'true'
else:
form = UploadFileForm()
indexContext['form'] = form
indexContext['calendar_config_options'] = OPTIONS
indexContext['calendar_events'] = events
return render(request, 'EXT_GEN/index.html', indexContext)
<commit_msg>Add rotation of CSRF token to prevent form resubmission<commit_after>from django.shortcuts import render, HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.template import Context, Template
from django.middleware.csrf import rotate_token
from models import UploadFileForm
from models import extGenOptimizer1
OPTIONS = """
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek',
},
defaultView: 'agendaWeek',
editable: true,
eventLimit: true, // allow "more" link when too many events
scrollTime: '08:00:00',
"""
# Create your views here.
def index(request):
    """Render the EXT_GEN index page and handle schedule-file uploads.

    GET: show an empty upload form.
    POST: validate the uploaded file and, if valid, run the optimizer
    parser to produce a FullCalendar events string for the template.

    Note: Python 2 code (uses the ``print`` statement).
    """
    indexContext = {}
    # Template flag for upload errors; currently always 'false' because
    # the error branch below is commented out.
    indexContext['fileReturnError'] = 'false'
    # Default: an empty FullCalendar events list.
    events = 'events:[],'
    if request.method == 'POST':
        form = UploadFileForm(request.POST, request.FILES)
        print "form is valid or not = ", form.is_valid()
        if form.is_valid():
            myOptimizerParser = extGenOptimizer1()
            # events becomes the optimizer's rendering of the uploaded files.
            events = myOptimizerParser.run(request.FILES)
        # else:
        # indexContext['fileReturnError'] = 'true'
    else:
        form = UploadFileForm()
    indexContext['form'] = form
    indexContext['calendar_config_options'] = OPTIONS
    indexContext['calendar_events'] = events
    # Rotate the CSRF token so refreshing the page cannot resubmit the form.
    rotate_token(request)
    return render(request, 'EXT_GEN/index.html', indexContext)
|
8a821cb62a35547417fcd56d02486e5cc2d8494f
|
xzarr.py
|
xzarr.py
|
from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
    """Open a zarr store as an xarray dataset.

    Parameters
    ----------
    urlpath: str
        Path to source. This can be a local directory or a remote data
        service (i.e., with a protocol specifier like ``'s3://``).
    storage_options: dict
        Parameters passed to the backend file-system
    kwargs:
        Further parameters are passed to xr.open_zarr
    """
    # Driver name used for plugin registration/lookup.
    name = 'zarr'
    def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
        super(ZarrSource, self).__init__(metadata=metadata)
        self.urlpath = urlpath
        # NOTE(review): may stay None; update_storage_options() below is
        # then handed None -- confirm dask accepts that.
        self.storage_options = storage_options
        self.kwargs = kwargs
        self._ds = None
    def _open_dataset(self):
        # Local imports so xarray/dask are only required when opening.
        import xarray as xr
        from dask.bytes.core import get_fs, infer_options, \
            update_storage_options
        # Split "protocol://path" and merge inline URL options with the
        # user-supplied storage_options.
        urlpath, protocol, options = infer_options(self.urlpath)
        update_storage_options(options, self.storage_options)
        self._fs, _ = get_fs(protocol, options)
        if protocol != 'file':
            # Remote stores go through a key-value mapper (see get_mapper).
            self._mapper = get_mapper(protocol, self._fs, urlpath)
            self._ds = xr.open_zarr(self._mapper, **self.kwargs)
        else:
            # Local paths can be handed to xarray directly.
            self._ds = xr.open_zarr(self.urlpath, **self.kwargs)
    def close(self):
        # Drop filesystem handles in addition to the base-class cleanup.
        super(ZarrSource, self).close()
        self._fs = None
        self._mapper = None
def get_mapper(protocol, fs, path):
    """Return a key-value mapper view of *path* on filesystem *fs*.

    Only the 's3' and 'gcs' protocols are supported; anything else
    raises NotImplementedError. The backend mapping classes are
    imported lazily so their optional dependencies are only needed
    when actually used.
    """
    if protocol == 's3':
        from s3fs.mapping import S3Map as mapper_cls
    elif protocol == 'gcs':
        from gcsfs.mapping import GCSMap as mapper_cls
    else:
        raise NotImplementedError
    return mapper_cls(path, fs)
|
from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
    """Open a zarr store as an xarray dataset via any fsspec filesystem.

    Parameters
    ----------
    urlpath: str
        Path to source. This can be a local directory or a remote data
        service (i.e., with a protocol specifier like ``'s3://``).
    storage_options: dict
        Parameters passed to the backend file-system
    kwargs:
        Further parameters are passed to xr.open_zarr
    """
    # Driver name used for plugin registration/lookup.
    name = 'zarr'
    def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
        super(ZarrSource, self).__init__(metadata=metadata)
        self.urlpath = urlpath
        # Normalize None to an empty dict so it can be **-expanded later.
        self.storage_options = storage_options or {}
        self.kwargs = kwargs
        self._ds = None
    def _open_dataset(self):
        # Local imports so xarray/fsspec are only required when opening.
        import xarray as xr
        from fsspec import get_mapper
        # fsspec infers the protocol from the URL and builds the mapper.
        self._mapper = get_mapper(self.urlpath, **self.storage_options)
        self._ds = xr.open_zarr(self._mapper, **self.kwargs)
    def close(self):
        # NOTE(review): _fs is never assigned in this version; setting it
        # to None here just (re)creates the attribute. Kept for parity
        # with the older dask-based implementation.
        super(ZarrSource, self).close()
        self._fs = None
        self._mapper = None
|
Make work with any filesystem
|
Make work with any filesystem
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
"""Open a xarray dataset.
Parameters
----------
urlpath: str
Path to source. This can be a local directory or a remote data
service (i.e., with a protocol specifier like ``'s3://``).
storage_options: dict
Parameters passed to the backend file-system
kwargs:
Further parameters are passed to xr.open_zarr
"""
name = 'zarr'
def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
super(ZarrSource, self).__init__(metadata=metadata)
self.urlpath = urlpath
self.storage_options = storage_options
self.kwargs = kwargs
self._ds = None
def _open_dataset(self):
import xarray as xr
from dask.bytes.core import get_fs, infer_options, \
update_storage_options
urlpath, protocol, options = infer_options(self.urlpath)
update_storage_options(options, self.storage_options)
self._fs, _ = get_fs(protocol, options)
if protocol != 'file':
self._mapper = get_mapper(protocol, self._fs, urlpath)
self._ds = xr.open_zarr(self._mapper, **self.kwargs)
else:
self._ds = xr.open_zarr(self.urlpath, **self.kwargs)
def close(self):
super(ZarrSource, self).close()
self._fs = None
self._mapper = None
def get_mapper(protocol, fs, path):
if protocol == 's3':
from s3fs.mapping import S3Map
return S3Map(path, fs)
elif protocol == 'gcs':
from gcsfs.mapping import GCSMap
return GCSMap(path, fs)
else:
raise NotImplementedError
Make work with any filesystem
|
from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
"""Open a xarray dataset.
Parameters
----------
urlpath: str
Path to source. This can be a local directory or a remote data
service (i.e., with a protocol specifier like ``'s3://``).
storage_options: dict
Parameters passed to the backend file-system
kwargs:
Further parameters are passed to xr.open_zarr
"""
name = 'zarr'
def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
super(ZarrSource, self).__init__(metadata=metadata)
self.urlpath = urlpath
self.storage_options = storage_options or {}
self.kwargs = kwargs
self._ds = None
def _open_dataset(self):
import xarray as xr
from fsspec import get_mapper
self._mapper = get_mapper(self.urlpath, **self.storage_options)
self._ds = xr.open_zarr(self._mapper, **self.kwargs)
def close(self):
super(ZarrSource, self).close()
self._fs = None
self._mapper = None
|
<commit_before>from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
"""Open a xarray dataset.
Parameters
----------
urlpath: str
Path to source. This can be a local directory or a remote data
service (i.e., with a protocol specifier like ``'s3://``).
storage_options: dict
Parameters passed to the backend file-system
kwargs:
Further parameters are passed to xr.open_zarr
"""
name = 'zarr'
def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
super(ZarrSource, self).__init__(metadata=metadata)
self.urlpath = urlpath
self.storage_options = storage_options
self.kwargs = kwargs
self._ds = None
def _open_dataset(self):
import xarray as xr
from dask.bytes.core import get_fs, infer_options, \
update_storage_options
urlpath, protocol, options = infer_options(self.urlpath)
update_storage_options(options, self.storage_options)
self._fs, _ = get_fs(protocol, options)
if protocol != 'file':
self._mapper = get_mapper(protocol, self._fs, urlpath)
self._ds = xr.open_zarr(self._mapper, **self.kwargs)
else:
self._ds = xr.open_zarr(self.urlpath, **self.kwargs)
def close(self):
super(ZarrSource, self).close()
self._fs = None
self._mapper = None
def get_mapper(protocol, fs, path):
if protocol == 's3':
from s3fs.mapping import S3Map
return S3Map(path, fs)
elif protocol == 'gcs':
from gcsfs.mapping import GCSMap
return GCSMap(path, fs)
else:
raise NotImplementedError
<commit_msg>Make work with any filesystem<commit_after>
|
from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
"""Open a xarray dataset.
Parameters
----------
urlpath: str
Path to source. This can be a local directory or a remote data
service (i.e., with a protocol specifier like ``'s3://``).
storage_options: dict
Parameters passed to the backend file-system
kwargs:
Further parameters are passed to xr.open_zarr
"""
name = 'zarr'
def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
super(ZarrSource, self).__init__(metadata=metadata)
self.urlpath = urlpath
self.storage_options = storage_options or {}
self.kwargs = kwargs
self._ds = None
def _open_dataset(self):
import xarray as xr
from fsspec import get_mapper
self._mapper = get_mapper(self.urlpath, **self.storage_options)
self._ds = xr.open_zarr(self._mapper, **self.kwargs)
def close(self):
super(ZarrSource, self).close()
self._fs = None
self._mapper = None
|
from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
"""Open a xarray dataset.
Parameters
----------
urlpath: str
Path to source. This can be a local directory or a remote data
service (i.e., with a protocol specifier like ``'s3://``).
storage_options: dict
Parameters passed to the backend file-system
kwargs:
Further parameters are passed to xr.open_zarr
"""
name = 'zarr'
def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
super(ZarrSource, self).__init__(metadata=metadata)
self.urlpath = urlpath
self.storage_options = storage_options
self.kwargs = kwargs
self._ds = None
def _open_dataset(self):
import xarray as xr
from dask.bytes.core import get_fs, infer_options, \
update_storage_options
urlpath, protocol, options = infer_options(self.urlpath)
update_storage_options(options, self.storage_options)
self._fs, _ = get_fs(protocol, options)
if protocol != 'file':
self._mapper = get_mapper(protocol, self._fs, urlpath)
self._ds = xr.open_zarr(self._mapper, **self.kwargs)
else:
self._ds = xr.open_zarr(self.urlpath, **self.kwargs)
def close(self):
super(ZarrSource, self).close()
self._fs = None
self._mapper = None
def get_mapper(protocol, fs, path):
if protocol == 's3':
from s3fs.mapping import S3Map
return S3Map(path, fs)
elif protocol == 'gcs':
from gcsfs.mapping import GCSMap
return GCSMap(path, fs)
else:
raise NotImplementedError
Make work with any filesystemfrom .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
"""Open a xarray dataset.
Parameters
----------
urlpath: str
Path to source. This can be a local directory or a remote data
service (i.e., with a protocol specifier like ``'s3://``).
storage_options: dict
Parameters passed to the backend file-system
kwargs:
Further parameters are passed to xr.open_zarr
"""
name = 'zarr'
def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
super(ZarrSource, self).__init__(metadata=metadata)
self.urlpath = urlpath
self.storage_options = storage_options or {}
self.kwargs = kwargs
self._ds = None
def _open_dataset(self):
import xarray as xr
from fsspec import get_mapper
self._mapper = get_mapper(self.urlpath, **self.storage_options)
self._ds = xr.open_zarr(self._mapper, **self.kwargs)
def close(self):
super(ZarrSource, self).close()
self._fs = None
self._mapper = None
|
<commit_before>from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
"""Open a xarray dataset.
Parameters
----------
urlpath: str
Path to source. This can be a local directory or a remote data
service (i.e., with a protocol specifier like ``'s3://``).
storage_options: dict
Parameters passed to the backend file-system
kwargs:
Further parameters are passed to xr.open_zarr
"""
name = 'zarr'
def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
super(ZarrSource, self).__init__(metadata=metadata)
self.urlpath = urlpath
self.storage_options = storage_options
self.kwargs = kwargs
self._ds = None
def _open_dataset(self):
import xarray as xr
from dask.bytes.core import get_fs, infer_options, \
update_storage_options
urlpath, protocol, options = infer_options(self.urlpath)
update_storage_options(options, self.storage_options)
self._fs, _ = get_fs(protocol, options)
if protocol != 'file':
self._mapper = get_mapper(protocol, self._fs, urlpath)
self._ds = xr.open_zarr(self._mapper, **self.kwargs)
else:
self._ds = xr.open_zarr(self.urlpath, **self.kwargs)
def close(self):
super(ZarrSource, self).close()
self._fs = None
self._mapper = None
def get_mapper(protocol, fs, path):
if protocol == 's3':
from s3fs.mapping import S3Map
return S3Map(path, fs)
elif protocol == 'gcs':
from gcsfs.mapping import GCSMap
return GCSMap(path, fs)
else:
raise NotImplementedError
<commit_msg>Make work with any filesystem<commit_after>from .base import DataSourceMixin
class ZarrSource(DataSourceMixin):
"""Open a xarray dataset.
Parameters
----------
urlpath: str
Path to source. This can be a local directory or a remote data
service (i.e., with a protocol specifier like ``'s3://``).
storage_options: dict
Parameters passed to the backend file-system
kwargs:
Further parameters are passed to xr.open_zarr
"""
name = 'zarr'
def __init__(self, urlpath, storage_options=None, metadata=None, **kwargs):
super(ZarrSource, self).__init__(metadata=metadata)
self.urlpath = urlpath
self.storage_options = storage_options or {}
self.kwargs = kwargs
self._ds = None
def _open_dataset(self):
import xarray as xr
from fsspec import get_mapper
self._mapper = get_mapper(self.urlpath, **self.storage_options)
self._ds = xr.open_zarr(self._mapper, **self.kwargs)
def close(self):
super(ZarrSource, self).close()
self._fs = None
self._mapper = None
|
f04e32cf6731e8900fa85b1814d9a68da1bcaa9d
|
vimeo/auth/authorization_code.py
|
vimeo/auth/authorization_code.py
|
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
import urllib
from .base import AuthenticationMixinBase
from . import GrantFailed
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urllib.urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
|
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
    """Helpers for the OAuth2 Authorization Code grant."""

    def auth_url(self, scope, redirect):
        """Build the URL a user should visit to authorize this app."""
        base = self.API_ROOT + "/oauth/authorize?"
        params = {
            "response_type": "code",
            "client_id": self.app_info[0],
        }
        if scope:
            # A list/tuple of scopes is collapsed to a space-separated string.
            if isinstance(scope, basestring):
                params['scope'] = scope
            else:
                params['scope'] = ' '.join(scope)
        if redirect:
            params['redirect_uri'] = redirect
        return base + urlencode(params)

    def exchange_code(self, code, redirect):
        """Trade the redirect *code* for an access token.

        Returns ``(token, user, scope)``; raises GrantFailed when the
        token endpoint does not answer with HTTP 200.
        """
        status, headers, resp = self.call_grant(
            '/oauth/access_token',
            {
                "grant_type": "authorization_code",
                "code": code,
                "redirect_uri": redirect,
            })
        if status != 200:
            raise GrantFailed()
        self.token = resp['access_token']
        return self.token, resp['user'], resp['scope']
|
Make urlencode load properly in python 3.
|
Make urlencode load properly in python 3.
|
Python
|
apache-2.0
|
blorenz/vimeo.py,vimeo/vimeo.py,gabrielgisoldo/vimeo.py,greedo/vimeo.py
|
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
import urllib
from .base import AuthenticationMixinBase
from . import GrantFailed
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urllib.urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
Make urlencode load properly in python 3.
|
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
|
<commit_before>#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
import urllib
from .base import AuthenticationMixinBase
from . import GrantFailed
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urllib.urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
<commit_msg>Make urlencode load properly in python 3.<commit_after>
|
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
|
#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
import urllib
from .base import AuthenticationMixinBase
from . import GrantFailed
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urllib.urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
Make urlencode load properly in python 3.#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
|
<commit_before>#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
import urllib
from .base import AuthenticationMixinBase
from . import GrantFailed
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urllib.urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
<commit_msg>Make urlencode load properly in python 3.<commit_after>#! /usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import
from .base import AuthenticationMixinBase
from . import GrantFailed
# We need to get urlencode from urllib.parse in Python 3, but fall back to
# urllib in Python 2
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
try:
basestring
except NameError:
basestring = str
class AuthorizationCodeMixin(AuthenticationMixinBase):
"""Implement helpers for the Authorization Code grant for OAuth2."""
def auth_url(self, scope, redirect):
"""Get the url to direct a user to authenticate."""
url = self.API_ROOT + "/oauth/authorize?"
query = {
"response_type": "code",
"client_id": self.app_info[0]
}
if scope:
if not isinstance(scope, basestring):
scope = ' '.join(scope)
query['scope'] = scope
if redirect:
query['redirect_uri'] = redirect
return url + urlencode(query)
def exchange_code(self, code, redirect):
"""Perform the exchange step for the code from the redirected user."""
code, headers, resp = self.call_grant('/oauth/access_token',
{
"grant_type": "authorization_code",
"code": code,
"redirect_uri": redirect
})
if not code == 200:
raise GrantFailed()
self.token = resp['access_token']
return self.token, resp['user'], resp['scope']
|
483e04671095eedabc8972982dd2109a5329c603
|
tests/test_templatetags.py
|
tests/test_templatetags.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import rows, columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = rows(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import columns
class TestColumns(unittest.TestCase):
    """Unit tests for the ``columns`` template filter."""

    def test_columns(self):
        # Splitting 7 items into 2 columns: the first column takes the
        # extra item.
        self.assertEqual(columns(range(7), 2), [[0, 1, 2, 3], [4, 5, 6]])
|
Fix tests to match updated defs.
|
Fix tests to match updated defs.
|
Python
|
bsd-3-clause
|
audreyr/django-columns,audreyr/django-columns,audreyr/django-columns
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import rows, columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = rows(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
Fix tests to match updated defs.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = columns(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import rows, columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = rows(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
<commit_msg>Fix tests to match updated defs.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = columns(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import rows, columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = rows(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
Fix tests to match updated defs.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = columns(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import rows, columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = rows(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
<commit_msg>Fix tests to match updated defs.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_templatetags
-------------------
Tests for `columns.templatetags` module.
"""
import unittest
from columns.templatetags.columns import columns
class TestColumns(unittest.TestCase):
def test_columns(self):
data = range(7)
result = columns(data, 2)
expected = [[0, 1, 2, 3], [4, 5, 6]]
self.assertEqual(result, expected)
|
272d4bab431cd2b4e2010f3a7cd5b1c236bdacb4
|
Export.py
|
Export.py
|
import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
cursor.execute("""select scores.trackid, score, path from scores, tracks
where scores.trackid = tracks.trackid
group by scores.trackid order by scoreid""")
results = cursor.fetchall()
for result in results:
print str(result[1]) + "\t" + result[2]
if __name__ == '__main__':
main()
|
import sqlite3
def main():
    """Print ``score<TAB>path`` for every scored track in the database.

    Reads the SQLite file named "database" in the current directory and
    emits one line per track whose ``score`` column is non-NULL.

    Note: Python 2 code (uses the ``print`` statement).
    """
    conn = sqlite3.connect("database")
    cursor = conn.cursor()
    # I claim this gives the current score. Another formulation is
    # select trackid, score, max(scoreid) from scores group by trackid;
    # cursor.execute("""select trackid, score from scores
    #                   group by trackid order by scoreid""")
    # Older query that derived the score from the scores history table:
    # cursor.execute("""select scores.trackid, score, path from scores, tracks
    #                   where scores.trackid = tracks.trackid
    #                   group by scores.trackid order by scoreid""")
    # The tracks table now carries the score directly; NULL means unscored.
    cursor.execute("""select score, path from tracks
                      where score is not null""")
    results = cursor.fetchall()
    for result in results:
        print str(result[0]) + "\t" + result[1]
if __name__ == '__main__':
    main()
|
Use new column for score.
|
Use new column for score.
|
Python
|
bsd-3-clause
|
erbridge/NQr,erbridge/NQr,erbridge/NQr
|
import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
cursor.execute("""select scores.trackid, score, path from scores, tracks
where scores.trackid = tracks.trackid
group by scores.trackid order by scoreid""")
results = cursor.fetchall()
for result in results:
print str(result[1]) + "\t" + result[2]
if __name__ == '__main__':
main()
Use new column for score.
|
import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
# cursor.execute("""select scores.trackid, score, path from scores, tracks
# where scores.trackid = tracks.trackid
# group by scores.trackid order by scoreid""")
cursor.execute("""select score, path from tracks
where score is not null""")
results = cursor.fetchall()
for result in results:
print str(result[0]) + "\t" + result[1]
if __name__ == '__main__':
main()
|
<commit_before>import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
cursor.execute("""select scores.trackid, score, path from scores, tracks
where scores.trackid = tracks.trackid
group by scores.trackid order by scoreid""")
results = cursor.fetchall()
for result in results:
print str(result[1]) + "\t" + result[2]
if __name__ == '__main__':
main()
<commit_msg>Use new column for score.<commit_after>
|
import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
# cursor.execute("""select scores.trackid, score, path from scores, tracks
# where scores.trackid = tracks.trackid
# group by scores.trackid order by scoreid""")
cursor.execute("""select score, path from tracks
where score is not null""")
results = cursor.fetchall()
for result in results:
print str(result[0]) + "\t" + result[1]
if __name__ == '__main__':
main()
|
import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
cursor.execute("""select scores.trackid, score, path from scores, tracks
where scores.trackid = tracks.trackid
group by scores.trackid order by scoreid""")
results = cursor.fetchall()
for result in results:
print str(result[1]) + "\t" + result[2]
if __name__ == '__main__':
main()
Use new column for score.import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
# cursor.execute("""select scores.trackid, score, path from scores, tracks
# where scores.trackid = tracks.trackid
# group by scores.trackid order by scoreid""")
cursor.execute("""select score, path from tracks
where score is not null""")
results = cursor.fetchall()
for result in results:
print str(result[0]) + "\t" + result[1]
if __name__ == '__main__':
main()
|
<commit_before>import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
cursor.execute("""select scores.trackid, score, path from scores, tracks
where scores.trackid = tracks.trackid
group by scores.trackid order by scoreid""")
results = cursor.fetchall()
for result in results:
print str(result[1]) + "\t" + result[2]
if __name__ == '__main__':
main()
<commit_msg>Use new column for score.<commit_after>import sqlite3
def main():
conn = sqlite3.connect("database")
cursor = conn.cursor()
# I claim this gives the current score. Another formulation is
# select trackid, score, max(scoreid) from scores group by trackid;
# cursor.execute("""select trackid, score from scores
# group by trackid order by scoreid""")
# cursor.execute("""select scores.trackid, score, path from scores, tracks
# where scores.trackid = tracks.trackid
# group by scores.trackid order by scoreid""")
cursor.execute("""select score, path from tracks
where score is not null""")
results = cursor.fetchall()
for result in results:
print str(result[0]) + "\t" + result[1]
if __name__ == '__main__':
main()
|
005d74dcb1f1f3e576af71e7cb3fb1e1d6d4df08
|
scripts/lib/paths.py
|
scripts/lib/paths.py
|
details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
course_path = course_dest + find_details_subdir(clbid) + '.json'
return course_path
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
|
details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
return course_dest + find_details_subdir(clbid) + '.json'
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
|
Remove the allocation of a variable in make_course_path
|
Remove the allocation of a variable in make_course_path
|
Python
|
mit
|
StoDevX/course-data-tools,StoDevX/course-data-tools
|
details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
course_path = course_dest + find_details_subdir(clbid) + '.json'
return course_path
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
Remove the allocation of a variable in make_course_path
|
details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
return course_dest + find_details_subdir(clbid) + '.json'
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
|
<commit_before>details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
course_path = course_dest + find_details_subdir(clbid) + '.json'
return course_path
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
<commit_msg>Remove the allocation of a variable in make_course_path<commit_after>
|
details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
return course_dest + find_details_subdir(clbid) + '.json'
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
|
details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
course_path = course_dest + find_details_subdir(clbid) + '.json'
return course_path
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
Remove the allocation of a variable in make_course_pathdetails_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
return course_dest + find_details_subdir(clbid) + '.json'
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
|
<commit_before>details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
course_path = course_dest + find_details_subdir(clbid) + '.json'
return course_path
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
<commit_msg>Remove the allocation of a variable in make_course_path<commit_after>details_source = './source/details/'
xml_source = './source/raw_xml/'
term_dest = './courses/terms/'
course_dest = './source/courses/'
info_path = './courses/info.json'
mappings_path = './related-data/generated/'
handmade_path = './related-data/handmade/'
def find_details_subdir(clbid):
str_clbid = str(clbid).zfill(10)
n_thousand = int(int(clbid) / 1000)
thousands_subdir = (n_thousand * 1000)
return str(thousands_subdir).zfill(5) + '/' + str_clbid
def make_course_path(clbid):
clbid = str(clbid).zfill(10)
return course_dest + find_details_subdir(clbid) + '.json'
def make_html_path(clbid):
clbid = str(clbid).zfill(10)
return details_source + find_details_subdir(clbid) + '.html'
def make_xml_term_path(term):
return xml_source + str(term) + '.xml'
|
e30b9cfd55b91424de62e5ac9fcdb0464a78f37e
|
testtube/tests/__init__.py
|
testtube/tests/__init__.py
|
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest
else:
import unittest
|
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest # NOQA
else:
import unittest # NOQA
if sys.version_info < (3,):
from mock import Mock, patch # NOQA
else:
from unittest.mock import Mock, patch # NOQA
# Frosted doesn't yet support noqa flags, so this hides the imported/unused
# complaints
Mock, patch, unittest
|
Make import mock.Mock or unittest.mock.Mock easier
|
Make import mock.Mock or unittest.mock.Mock easier
|
Python
|
mit
|
thomasw/testtube,beck/testtube,blaix/testtube
|
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest
else:
import unittest
Make import mock.Mock or unittest.mock.Mock easier
|
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest # NOQA
else:
import unittest # NOQA
if sys.version_info < (3,):
from mock import Mock, patch # NOQA
else:
from unittest.mock import Mock, patch # NOQA
# Frosted doesn't yet support noqa flags, so this hides the imported/unused
# complaints
Mock, patch, unittest
|
<commit_before>import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest
else:
import unittest
<commit_msg>Make import mock.Mock or unittest.mock.Mock easier<commit_after>
|
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest # NOQA
else:
import unittest # NOQA
if sys.version_info < (3,):
from mock import Mock, patch # NOQA
else:
from unittest.mock import Mock, patch # NOQA
# Frosted doesn't yet support noqa flags, so this hides the imported/unused
# complaints
Mock, patch, unittest
|
import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest
else:
import unittest
Make import mock.Mock or unittest.mock.Mock easierimport sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest # NOQA
else:
import unittest # NOQA
if sys.version_info < (3,):
from mock import Mock, patch # NOQA
else:
from unittest.mock import Mock, patch # NOQA
# Frosted doesn't yet support noqa flags, so this hides the imported/unused
# complaints
Mock, patch, unittest
|
<commit_before>import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest
else:
import unittest
<commit_msg>Make import mock.Mock or unittest.mock.Mock easier<commit_after>import sys
if sys.version_info[:2] < (2, 7):
import unittest2 as unittest # NOQA
else:
import unittest # NOQA
if sys.version_info < (3,):
from mock import Mock, patch # NOQA
else:
from unittest.mock import Mock, patch # NOQA
# Frosted doesn't yet support noqa flags, so this hides the imported/unused
# complaints
Mock, patch, unittest
|
e9edc74a28442c2f519d4a3c40253f7844c9ca2f
|
thecut/authorship/forms.py
|
thecut/authorship/forms.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
This form requires that a property, ``self.user`` be set to an instance of
:py:class`~django.contrib.auth.models.User` before the ``save()`` method is
called.
"""
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
Requires that a ``User`` instance be passed in to the constructor. Views
that inherit from ``AuthorshipViewMixin`` automatically pass this in.
"""
def __init__(self, user, *args, **kwargs):
self.user = user
super(AuthorshipFormMixin, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
|
Set the `self.user` property on the `AuthorshipFormMixin`.
|
Set the `self.user` property on the `AuthorshipFormMixin`.
|
Python
|
apache-2.0
|
thecut/thecut-authorship
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
This form requires that a property, ``self.user`` be set to an instance of
:py:class`~django.contrib.auth.models.User` before the ``save()`` method is
called.
"""
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
Set the `self.user` property on the `AuthorshipFormMixin`.
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
Requires that a ``User`` instance be passed in to the constructor. Views
that inherit from ``AuthorshipViewMixin`` automatically pass this in.
"""
def __init__(self, user, *args, **kwargs):
self.user = user
super(AuthorshipFormMixin, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
This form requires that a property, ``self.user`` be set to an instance of
:py:class`~django.contrib.auth.models.User` before the ``save()`` method is
called.
"""
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
<commit_msg>Set the `self.user` property on the `AuthorshipFormMixin`.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
Requires that a ``User`` instance be passed in to the constructor. Views
that inherit from ``AuthorshipViewMixin`` automatically pass this in.
"""
def __init__(self, user, *args, **kwargs):
self.user = user
super(AuthorshipFormMixin, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
This form requires that a property, ``self.user`` be set to an instance of
:py:class`~django.contrib.auth.models.User` before the ``save()`` method is
called.
"""
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
Set the `self.user` property on the `AuthorshipFormMixin`.# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
Requires that a ``User`` instance be passed in to the constructor. Views
that inherit from ``AuthorshipViewMixin`` automatically pass this in.
"""
def __init__(self, user, *args, **kwargs):
self.user = user
super(AuthorshipFormMixin, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
This form requires that a property, ``self.user`` be set to an instance of
:py:class`~django.contrib.auth.models.User` before the ``save()`` method is
called.
"""
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
<commit_msg>Set the `self.user` property on the `AuthorshipFormMixin`.<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
class AuthorshipFormMixin(object):
"""Set the ``created_by`` and ``updated_by`` fields on a model.
Requires that a ``User`` instance be passed in to the constructor. Views
that inherit from ``AuthorshipViewMixin`` automatically pass this in.
"""
def __init__(self, user, *args, **kwargs):
self.user = user
super(AuthorshipFormMixin, self).__init__(*args, **kwargs)
def save(self, *args, **kwargs):
self.instance.updated_by = self.user
if not self.instance.pk:
self.instance.created_by = self.user
return super(AuthorshipFormMixin, self).save(*args, **kwargs)
|
013c359fff199e20b2359d3b59526ca7d9ad2932
|
velvet/test/test_utils.py
|
velvet/test/test_utils.py
|
#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in xrange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
|
#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in np.arange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
|
Fix bug in unit test
|
Fix bug in unit test
|
Python
|
bsd-3-clause
|
sgwoodjr/velvet
|
#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in xrange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
Fix bug in unit test
|
#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in np.arange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
|
<commit_before>#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in xrange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Fix bug in unit test<commit_after>
|
#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in np.arange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
|
#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in xrange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
Fix bug in unit test#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in np.arange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
|
<commit_before>#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in xrange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Fix bug in unit test<commit_after>#!/usr/bin/env python
#------------------------------------------------------------------------
# Copyright (c) 2015 SGW
#
# Distributed under the terms of the New BSD License.
#
# The full License is in the file LICENSE
#------------------------------------------------------------------------
import unittest
import numpy as np
import velvet as vt
class TestUtilFunctions(unittest.TestCase):
def test_isodd(self):
x = np.array([1,2,1,1,-3,-4,7,8,9,10,-2,1,-3,5,6,7,-10])
y = vt.isodd(x)
yCorrect = np.array([1,0,1,1,1,0,1,0,1,0,0,1,1,1,0,1,0])
for ind in np.arange(len(y)):
self.assertEqual(y[ind],yCorrect[ind])
def mysuite():
return unittest.TestLoader().loadTestsFromTestCase(TestUtilFunctions)
if __name__ == '__main__':
suite = mysuite()
unittest.TextTestRunner(verbosity=2).run(suite)
|
3df3f72b54068deaca51ce2b4c52c185bf8f4526
|
virtool/uploads/models.py
|
virtool/uploads/models.py
|
import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return """<Upload(id= {self.id}, created_at={self.created_at}, name={self.name}, \
name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, reserved={self.reserved}, \
size={self.size}, type={self.type}, user={self.user}, uploaded_at={self.uploaded_at}>"""
|
import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(str, enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return f"<Upload(id={self.id}, created_at={self.created_at}, name={self.name}, " \
f"name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, " \
f"reserved={self.reserved}, " f"size={self.size}, type={self.type}, user={self.user}, " \
f"uploaded_at={self.uploaded_at}>"
|
Declare subclass of `UploadType` to be `str`
|
Declare subclass of `UploadType` to be `str`
* Fixes issues with JSON serializing
* Revert `__repr__` string format changes as the newlines created large gaps of whitespace
|
Python
|
mit
|
virtool/virtool,igboyes/virtool,igboyes/virtool,virtool/virtool
|
import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return """<Upload(id= {self.id}, created_at={self.created_at}, name={self.name}, \
name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, reserved={self.reserved}, \
size={self.size}, type={self.type}, user={self.user}, uploaded_at={self.uploaded_at}>"""
Declare subclass of `UploadType` to be `str`
* Fixes issues with JSON serializing
* Revert `__repr__` string format changes as the newlines created large gaps of whitespace
|
import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(str, enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return f"<Upload(id={self.id}, created_at={self.created_at}, name={self.name}, " \
f"name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, " \
f"reserved={self.reserved}, " f"size={self.size}, type={self.type}, user={self.user}, " \
f"uploaded_at={self.uploaded_at}>"
|
<commit_before>import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return """<Upload(id= {self.id}, created_at={self.created_at}, name={self.name}, \
name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, reserved={self.reserved}, \
size={self.size}, type={self.type}, user={self.user}, uploaded_at={self.uploaded_at}>"""
<commit_msg>Declare subclass of `UploadType` to be `str`
* Fixes issues with JSON serializing
* Revert `__repr__` string format changes as the newlines created large gaps of whitespace<commit_after>
|
import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(str, enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return f"<Upload(id={self.id}, created_at={self.created_at}, name={self.name}, " \
f"name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, " \
f"reserved={self.reserved}, " f"size={self.size}, type={self.type}, user={self.user}, " \
f"uploaded_at={self.uploaded_at}>"
|
import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return """<Upload(id= {self.id}, created_at={self.created_at}, name={self.name}, \
name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, reserved={self.reserved}, \
size={self.size}, type={self.type}, user={self.user}, uploaded_at={self.uploaded_at}>"""
Declare subclass of `UploadType` to be `str`
* Fixes issues with JSON serializing
* Revert `__repr__` string format changes as the newlines created large gaps of whitespaceimport enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(str, enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return f"<Upload(id={self.id}, created_at={self.created_at}, name={self.name}, " \
f"name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, " \
f"reserved={self.reserved}, " f"size={self.size}, type={self.type}, user={self.user}, " \
f"uploaded_at={self.uploaded_at}>"
|
<commit_before>import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return """<Upload(id= {self.id}, created_at={self.created_at}, name={self.name}, \
name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, reserved={self.reserved}, \
size={self.size}, type={self.type}, user={self.user}, uploaded_at={self.uploaded_at}>"""
<commit_msg>Declare subclass of `UploadType` to be `str`
* Fixes issues with JSON serializing
* Revert `__repr__` string format changes as the newlines created large gaps of whitespace<commit_after>import enum
from sqlalchemy import Column, String, Boolean, Integer, DateTime, Enum
from virtool.postgres import Base
class UploadType(str, enum.Enum):
hmm = "hmm"
reference = "reference"
reads = "reads"
subtraction = "subtraction"
null = None
class Upload(Base):
__tablename__ = "uploads"
id = Column(Integer, primary_key=True)
created_at = Column(DateTime)
name = Column(String)
name_on_disk = Column(String, unique=True)
ready = Column(Boolean)
removed = Column(Boolean)
reserved = Column(Boolean)
size = Column(Integer)
type = Column(Enum(UploadType))
user = Column(String)
uploaded_at = Column(DateTime)
def __repr__(self):
return f"<Upload(id={self.id}, created_at={self.created_at}, name={self.name}, " \
f"name_on_disk={self.name_on_disk}, ready={self.ready}, removed={self.removed}, " \
f"reserved={self.reserved}, " f"size={self.size}, type={self.type}, user={self.user}, " \
f"uploaded_at={self.uploaded_at}>"
|
778fccf168a47f80dcc92373a92964467343bcce
|
topics/lemmatize_folder.py
|
topics/lemmatize_folder.py
|
import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma' )
fo.write( lemma )
fo.close()
|
import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma', 'w' )
fo.write( lemma )
fo.close()
|
Write access to new output file
|
Write access to new output file
|
Python
|
mit
|
HIIT/digivaalit-2015,HIIT/digivaalit-2015,HIIT/digivaalit-2015
|
import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma' )
fo.write( lemma )
fo.close()
Write access to new output file
|
import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma', 'w' )
fo.write( lemma )
fo.close()
|
<commit_before>import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma' )
fo.write( lemma )
fo.close()
<commit_msg>Write access to new output file<commit_after>
|
import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma', 'w' )
fo.write( lemma )
fo.close()
|
import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma' )
fo.write( lemma )
fo.close()
Write access to new output fileimport os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma', 'w' )
fo.write( lemma )
fo.close()
|
<commit_before>import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma' )
fo.write( lemma )
fo.close()
<commit_msg>Write access to new output file<commit_after>import os
import sys
import re
import subprocess
def lemmatize( text ):
text = text.encode('utf8')
text = re.sub( '[\.,?!:;]' , '' , text )
out = subprocess.check_output( 'module load finnish-process; echo "' + text + '" | finnish-process', shell = True)
lemma = ''
for line in out.split('\n'):
line = line.strip()
line = line.split('\t')
if len( line ) >= 2:
lemma += line[1] + ' '
return lemma
## folder usecase
path = sys.argv[1]
for file in os.listdir( path ):
text = open( path + file )
text = text.readlines()
text = map( lambda x: x.strip(), text )
text = ' '.join( text )
lemma = lemmatize( text )
fo = open( path + file + '.lemma', 'w' )
fo.write( lemma )
fo.close()
|
cdf674fc65491c72723d068e72f9ba9f85c5b482
|
django_summernote/__init__.py
|
django_summernote/__init__.py
|
version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
|
version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
from django import VERSION as django_version
if django_version < (3, 2):
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
|
Fix default_app_config problem with Django >= 3.20
|
Fix default_app_config problem with Django >= 3.20
|
Python
|
mit
|
summernote/django-summernote,summernote/django-summernote,summernote/django-summernote
|
version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
Fix default_app_config problem with Django >= 3.20
|
version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
from django import VERSION as django_version
if django_version < (3, 2):
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
|
<commit_before>version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
<commit_msg>Fix default_app_config problem with Django >= 3.20<commit_after>
|
version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
from django import VERSION as django_version
if django_version < (3, 2):
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
|
version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
Fix default_app_config problem with Django >= 3.20version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
from django import VERSION as django_version
if django_version < (3, 2):
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
|
<commit_before>version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
<commit_msg>Fix default_app_config problem with Django >= 3.20<commit_after>version_info = (0, 8, 11, 6)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "django-summernote contributors"
from django import VERSION as django_version
if django_version < (3, 2):
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
|
298a60fa2ad56cb6bfbf4a9821b547e5b197384c
|
django_replicated/decorators.py
|
django_replicated/decorators.py
|
# -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
def _use_state(state):
def decorator(func):
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
|
# -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
from functools import wraps
def _use_state(state):
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
|
Use 'wraps' from 'functools', to keep wrapped function's docstring, name and attributes.
|
Use 'wraps' from 'functools', to keep wrapped function's docstring, name and attributes.
|
Python
|
bsd-3-clause
|
lavr/django_replicated,dmirain/django_replicated,Zunonia/django_replicated
|
# -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
def _use_state(state):
def decorator(func):
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
Use 'wraps' from 'functools', to keep wrapped function's docstring, name and attributes.
|
# -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
from functools import wraps
def _use_state(state):
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
|
<commit_before># -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
def _use_state(state):
def decorator(func):
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
<commit_msg>Use 'wraps' from 'functools', to keep wrapped function's docstring, name and attributes.<commit_after>
|
# -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
from functools import wraps
def _use_state(state):
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
|
# -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
def _use_state(state):
def decorator(func):
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
Use 'wraps' from 'functools', to keep wrapped function's docstring, name and attributes.# -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
from functools import wraps
def _use_state(state):
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
|
<commit_before># -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
def _use_state(state):
def decorator(func):
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
<commit_msg>Use 'wraps' from 'functools', to keep wrapped function's docstring, name and attributes.<commit_after># -*- coding:utf-8 -*-
'''
Decorators for using specific routing state for particular requests.
Used in cases when automatic switching based on request method doesn't
work.
Usage:
from django_replicated.decorators import use_master, use_slave
@use_master
def my_view(request, ...):
# master database used for all db operations during
# execution of the view (if not explicitly overriden).
@use_slave
def my_view(request, ...):
# same with slave connection
'''
import utils
from functools import wraps
def _use_state(state):
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
current_state = utils.check_state_override(request, state)
utils._use_state(current_state)
try:
response = func(request, *args, **kwargs)
finally:
utils._revert()
utils.handle_updated_redirect(request, response)
return response
return wrapper
return decorator
use_master = _use_state('master')
use_slave = _use_state('slave')
|
af2afbbbd3014f85c69bbfb4dc65f6850e7840b4
|
djlint/analyzers/db_backends.py
|
djlint/analyzers/db_backends.py
|
import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r backend is deprecated, use %r instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
|
import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r database backend has beed deprecated in Django 1.3 '
'and removed in 1.4. Use %r instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
|
Update database backends analyzer to target 1.5
|
Update database backends analyzer to target 1.5
|
Python
|
isc
|
alfredhq/djlint
|
import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r backend is deprecated, use %r instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
Update database backends analyzer to target 1.5
|
import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r database backend has beed deprecated in Django 1.3 '
'and removed in 1.4. Use %r instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
|
<commit_before>import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r backend is deprecated, use %r instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
<commit_msg>Update database backends analyzer to target 1.5<commit_after>
|
import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r database backend has beed deprecated in Django 1.3 '
'and removed in 1.4. Use %r instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
|
import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r backend is deprecated, use %r instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
Update database backends analyzer to target 1.5import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r database backend has beed deprecated in Django 1.3 '
'and removed in 1.4. Use %r instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
|
<commit_before>import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
deprecated_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.deprecated_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.deprecated_items[name]
result = Result(
description = (
'%r backend is deprecated, use %r instead' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
<commit_msg>Update database backends analyzer to target 1.5<commit_after>import ast
from .base import BaseAnalyzer, Result
class DB_BackendsVisitor(ast.NodeVisitor):
def __init__(self):
self.found = []
removed_items = {
'django.db.backends.postgresql':
'django.db.backends.postgresql_psycopg2',
}
def visit_Str(self, node):
if node.s in self.removed_items.keys():
self.found.append((node.s, node))
class DB_BackendsAnalyzer(BaseAnalyzer):
def analyze_file(self, filepath, code):
if not isinstance(code, ast.AST):
return
visitor = DB_BackendsVisitor()
visitor.visit(code)
for name, node in visitor.found:
propose = visitor.removed_items[name]
result = Result(
description = (
'%r database backend has beed deprecated in Django 1.3 '
'and removed in 1.4. Use %r instead.' % (name, propose)
),
path = filepath,
line = node.lineno)
lines = self.get_file_lines(filepath, node.lineno, node.lineno)
for lineno, important, text in lines:
result.source.add_line(lineno, text, important)
result.solution.add_line(lineno, text.replace(name, propose), important)
yield result
|
2ac9df192c523d125288f093bb7c2d4c91b54bbe
|
yunity/users/factories.py
|
yunity/users/factories.py
|
from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.base.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
|
from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.walls.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
|
Fix reference to wall factory
|
Fix reference to wall factory
|
Python
|
agpl-3.0
|
yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core
|
from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.base.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
Fix reference to wall factory
|
from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.walls.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
|
<commit_before>from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.base.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
<commit_msg>Fix reference to wall factory<commit_after>
|
from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.walls.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
|
from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.base.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
Fix reference to wall factoryfrom django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.walls.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
|
<commit_before>from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.base.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
<commit_msg>Fix reference to wall factory<commit_after>from django.contrib.auth import get_user_model
from factory import DjangoModelFactory, CREATE_STRATEGY, LazyAttribute, PostGeneration, SubFactory
from yunity.walls.factories import Wall
from yunity.utils.tests.fake import faker
class User(DjangoModelFactory):
class Meta:
model = get_user_model()
strategy = CREATE_STRATEGY
is_active = True
is_staff = False
display_name = LazyAttribute(lambda _: faker.name())
first_name = LazyAttribute(lambda _: faker.name())
last_name = LazyAttribute(lambda _: faker.name())
email = LazyAttribute(lambda _: faker.email())
password = PostGeneration(lambda obj, *args, **kwargs: obj.set_password(obj.display_name))
wall = SubFactory(Wall)
|
f8a209e7b0cca0fb6cd7bd49fa4f024c472b4e13
|
zappa/ext/django_zappa.py
|
zappa/ext/django_zappa.py
|
import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
from django.core.handlers.wsgi import WSGIHandler
from django.core.wsgi import get_wsgi_application
import os
def get_django_wsgi(settings_module):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
django.setup()
return get_wsgi_application()
|
import os
import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
def get_django_wsgi(settings_module):
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
if django.VERSION[0] <= 1 and django.VERSION[1] < 7:
# call django.setup only for django <1.7.0
# (because setup already in get_wsgi_application since that)
# https://github.com/django/django/commit/80d74097b4bd7186ad99b6d41d0ed90347a39b21
django.setup()
return get_wsgi_application()
|
Call django.setup() from zappa only for django < 1.7.0
|
Call django.setup() from zappa only for django < 1.7.0
* because since django 1.7 it leads to double initialization, which is problematic on some installations
|
Python
|
mit
|
scoates/Zappa,Miserlou/Zappa,anush0247/Zappa,mathom/Zappa,michi88/Zappa,parroyo/Zappa,anush0247/Zappa,longzhi/Zappa,Miserlou/Zappa,longzhi/Zappa,scoates/Zappa,pjz/Zappa,pjz/Zappa,parroyo/Zappa,mathom/Zappa,michi88/Zappa
|
import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
from django.core.handlers.wsgi import WSGIHandler
from django.core.wsgi import get_wsgi_application
import os
def get_django_wsgi(settings_module):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
django.setup()
return get_wsgi_application()Call django.setup() from zappa only for django < 1.7.0
* because since django 1.7 it leads to double initialization, which is problematic on some installations
|
import os
import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
def get_django_wsgi(settings_module):
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
if django.VERSION[0] <= 1 and django.VERSION[1] < 7:
# call django.setup only for django <1.7.0
# (because setup already in get_wsgi_application since that)
# https://github.com/django/django/commit/80d74097b4bd7186ad99b6d41d0ed90347a39b21
django.setup()
return get_wsgi_application()
|
<commit_before>import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
from django.core.handlers.wsgi import WSGIHandler
from django.core.wsgi import get_wsgi_application
import os
def get_django_wsgi(settings_module):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
django.setup()
return get_wsgi_application()<commit_msg>Call django.setup() from zappa only for django < 1.7.0
* because since django 1.7 it leads to double initialization, which is problematic on some installations<commit_after>
|
import os
import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
def get_django_wsgi(settings_module):
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
if django.VERSION[0] <= 1 and django.VERSION[1] < 7:
# call django.setup only for django <1.7.0
# (because setup already in get_wsgi_application since that)
# https://github.com/django/django/commit/80d74097b4bd7186ad99b6d41d0ed90347a39b21
django.setup()
return get_wsgi_application()
|
import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
from django.core.handlers.wsgi import WSGIHandler
from django.core.wsgi import get_wsgi_application
import os
def get_django_wsgi(settings_module):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
django.setup()
return get_wsgi_application()Call django.setup() from zappa only for django < 1.7.0
* because since django 1.7 it leads to double initialization, which is problematic on some installationsimport os
import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
def get_django_wsgi(settings_module):
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
if django.VERSION[0] <= 1 and django.VERSION[1] < 7:
# call django.setup only for django <1.7.0
# (because setup already in get_wsgi_application since that)
# https://github.com/django/django/commit/80d74097b4bd7186ad99b6d41d0ed90347a39b21
django.setup()
return get_wsgi_application()
|
<commit_before>import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
from django.core.handlers.wsgi import WSGIHandler
from django.core.wsgi import get_wsgi_application
import os
def get_django_wsgi(settings_module):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
django.setup()
return get_wsgi_application()<commit_msg>Call django.setup() from zappa only for django < 1.7.0
* because since django 1.7 it leads to double initialization, which is problematic on some installations<commit_after>import os
import sys
# add the Lambda root path into the sys.path
sys.path.append('/var/task')
def get_django_wsgi(settings_module):
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", settings_module)
import django
if django.VERSION[0] <= 1 and django.VERSION[1] < 7:
# call django.setup only for django <1.7.0
# (because setup already in get_wsgi_application since that)
# https://github.com/django/django/commit/80d74097b4bd7186ad99b6d41d0ed90347a39b21
django.setup()
return get_wsgi_application()
|
5e671fe98093cf506ce1cb134c335cabd934ad84
|
aioredis/locks.py
|
aioredis/locks.py
|
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
|
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
|
Fix critical bug with patched Lock
|
Fix critical bug with patched Lock
|
Python
|
mit
|
aio-libs/aioredis,aio-libs/aioredis,ymap/aioredis
|
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
Fix critical bug with patched Lock
|
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
|
<commit_before>from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
<commit_msg>Fix critical bug with patched Lock<commit_after>
|
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
|
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
Fix critical bug with patched Lockfrom asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
|
<commit_before>from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
<commit_msg>Fix critical bug with patched Lock<commit_after>from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
|
2fb1e51d7131f089b6cedbdf227eddb79e3641bf
|
zerver/webhooks/dropbox/view.py
|
zerver/webhooks/dropbox/view.py
|
from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox')
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile,
notify_bot_owner_on_invalid_json=False) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
|
from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox', notify_bot_owner_on_invalid_json=False)
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
|
Fix incorrect placement of notify_bot_owner_on_invalid_json.
|
dropbox: Fix incorrect placement of notify_bot_owner_on_invalid_json.
This was an error I introduced in editing
b79213d2602291a4c7ccbafe0f775f77db60665b.
|
Python
|
apache-2.0
|
punchagan/zulip,brainwane/zulip,andersk/zulip,dhcrzf/zulip,rht/zulip,rishig/zulip,dhcrzf/zulip,andersk/zulip,showell/zulip,punchagan/zulip,rishig/zulip,showell/zulip,kou/zulip,kou/zulip,hackerkid/zulip,brainwane/zulip,synicalsyntax/zulip,hackerkid/zulip,rishig/zulip,jackrzhang/zulip,kou/zulip,andersk/zulip,rht/zulip,timabbott/zulip,punchagan/zulip,jackrzhang/zulip,hackerkid/zulip,showell/zulip,dhcrzf/zulip,shubhamdhama/zulip,jackrzhang/zulip,kou/zulip,brainwane/zulip,shubhamdhama/zulip,dhcrzf/zulip,timabbott/zulip,kou/zulip,showell/zulip,timabbott/zulip,punchagan/zulip,timabbott/zulip,dhcrzf/zulip,rishig/zulip,synicalsyntax/zulip,jackrzhang/zulip,rht/zulip,rishig/zulip,brainwane/zulip,timabbott/zulip,synicalsyntax/zulip,synicalsyntax/zulip,zulip/zulip,shubhamdhama/zulip,hackerkid/zulip,jackrzhang/zulip,tommyip/zulip,kou/zulip,tommyip/zulip,eeshangarg/zulip,brainwane/zulip,andersk/zulip,andersk/zulip,zulip/zulip,timabbott/zulip,rht/zulip,shubhamdhama/zulip,tommyip/zulip,zulip/zulip,punchagan/zulip,dhcrzf/zulip,andersk/zulip,brainwane/zulip,synicalsyntax/zulip,jackrzhang/zulip,zulip/zulip,shubhamdhama/zulip,eeshangarg/zulip,andersk/zulip,shubhamdhama/zulip,rishig/zulip,synicalsyntax/zulip,synicalsyntax/zulip,zulip/zulip,tommyip/zulip,hackerkid/zulip,showell/zulip,tommyip/zulip,hackerkid/zulip,punchagan/zulip,dhcrzf/zulip,rht/zulip,shubhamdhama/zulip,jackrzhang/zulip,timabbott/zulip,brainwane/zulip,zulip/zulip,rishig/zulip,kou/zulip,eeshangarg/zulip,rht/zulip,hackerkid/zulip,eeshangarg/zulip,tommyip/zulip,rht/zulip,eeshangarg/zulip,showell/zulip,eeshangarg/zulip,punchagan/zulip,zulip/zulip,tommyip/zulip,eeshangarg/zulip,showell/zulip
|
from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox')
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile,
notify_bot_owner_on_invalid_json=False) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
dropbox: Fix incorrect placement of notify_bot_owner_on_invalid_json.
This was an error I introduced in editing
b79213d2602291a4c7ccbafe0f775f77db60665b.
|
from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox', notify_bot_owner_on_invalid_json=False)
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
|
<commit_before>from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox')
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile,
notify_bot_owner_on_invalid_json=False) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
<commit_msg>dropbox: Fix incorrect placement of notify_bot_owner_on_invalid_json.
This was an error I introduced in editing
b79213d2602291a4c7ccbafe0f775f77db60665b.<commit_after>
|
from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox', notify_bot_owner_on_invalid_json=False)
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
|
from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox')
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile,
notify_bot_owner_on_invalid_json=False) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
dropbox: Fix incorrect placement of notify_bot_owner_on_invalid_json.
This was an error I introduced in editing
b79213d2602291a4c7ccbafe0f775f77db60665b.from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox', notify_bot_owner_on_invalid_json=False)
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
|
<commit_before>from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox')
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile,
notify_bot_owner_on_invalid_json=False) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
<commit_msg>dropbox: Fix incorrect placement of notify_bot_owner_on_invalid_json.
This was an error I introduced in editing
b79213d2602291a4c7ccbafe0f775f77db60665b.<commit_after>from django.http import HttpRequest, HttpResponse
from zerver.lib.response import json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
@api_key_only_webhook_view('Dropbox', notify_bot_owner_on_invalid_json=False)
@has_request_variables
def api_dropbox_webhook(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
if request.method == 'GET':
return HttpResponse(request.GET['challenge'])
elif request.method == 'POST':
topic = 'Dropbox'
check_send_webhook_message(request, user_profile, topic,
"File has been updated on Dropbox!")
return json_success()
|
7b05ce75c0dd16944b26f2c53f1508aa3f771d27
|
migrations/versions/0177_add_virus_scan_statuses.py
|
migrations/versions/0177_add_virus_scan_statuses.py
|
"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0176_alter_billing_columns'
down_revision = '0175_drop_job_statistics_table'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
|
"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0177_add_virus_scan_statuses'
down_revision = '0176_alter_billing_columns'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
|
Fix revision numbers in migration 0177
|
Fix revision numbers in migration 0177
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0176_alter_billing_columns'
down_revision = '0175_drop_job_statistics_table'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
Fix revision numbers in migration 0177
|
"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0177_add_virus_scan_statuses'
down_revision = '0176_alter_billing_columns'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
|
<commit_before>"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0176_alter_billing_columns'
down_revision = '0175_drop_job_statistics_table'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
<commit_msg>Fix revision numbers in migration 0177<commit_after>
|
"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0177_add_virus_scan_statuses'
down_revision = '0176_alter_billing_columns'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
|
"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0176_alter_billing_columns'
down_revision = '0175_drop_job_statistics_table'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
Fix revision numbers in migration 0177"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0177_add_virus_scan_statuses'
down_revision = '0176_alter_billing_columns'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
|
<commit_before>"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0176_alter_billing_columns'
down_revision = '0175_drop_job_statistics_table'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
<commit_msg>Fix revision numbers in migration 0177<commit_after>"""
Revision ID: 0177_add_virus_scan_statuses
Revises: 0176_alter_billing_columns
Create Date: 2018-02-21 14:05:04.448977
"""
from alembic import op
revision = '0177_add_virus_scan_statuses'
down_revision = '0176_alter_billing_columns'
def upgrade():
op.execute("INSERT INTO notification_status_types (name) VALUES ('pending-virus-check')")
op.execute("INSERT INTO notification_status_types (name) VALUES ('virus-scan-failed')")
def downgrade():
op.execute("UPDATE notifications SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notification_history SET notification_status = 'created' WHERE notification_status = 'pending-virus-check'")
op.execute("UPDATE notifications SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("UPDATE notification_history SET notification_status = 'permanent-failure' WHERE notification_status = 'virus-scan-failed'")
op.execute("DELETE FROM notification_status_types WHERE name in ('pending-virus-check', 'virus-scan-failed')")
|
a7acf05dd308b88fe9de5e04018438e7861e5c93
|
src/sentry/web/forms/invite_organization_member.py
|
src/sentry/web/forms/invite_organization_member.py
|
from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
|
from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
# override this to ensure the field is required
email = forms.EmailField()
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
|
Enforce requirement of email field on member invite
|
Enforce requirement of email field on member invite
|
Python
|
bsd-3-clause
|
mitsuhiko/sentry,ifduyue/sentry,fotinakis/sentry,mvaled/sentry,zenefits/sentry,jean/sentry,BuildingLink/sentry,fotinakis/sentry,BuildingLink/sentry,jean/sentry,JamesMura/sentry,BuildingLink/sentry,JamesMura/sentry,JackDanger/sentry,gencer/sentry,mvaled/sentry,daevaorn/sentry,BuildingLink/sentry,zenefits/sentry,daevaorn/sentry,nicholasserra/sentry,fotinakis/sentry,JamesMura/sentry,jean/sentry,ifduyue/sentry,daevaorn/sentry,nicholasserra/sentry,mvaled/sentry,gencer/sentry,gencer/sentry,gencer/sentry,zenefits/sentry,JackDanger/sentry,JackDanger/sentry,daevaorn/sentry,mvaled/sentry,mvaled/sentry,mvaled/sentry,beeftornado/sentry,beeftornado/sentry,gencer/sentry,alexm92/sentry,alexm92/sentry,zenefits/sentry,alexm92/sentry,ifduyue/sentry,jean/sentry,looker/sentry,mitsuhiko/sentry,JamesMura/sentry,nicholasserra/sentry,jean/sentry,zenefits/sentry,looker/sentry,looker/sentry,BuildingLink/sentry,looker/sentry,looker/sentry,ifduyue/sentry,ifduyue/sentry,JamesMura/sentry,fotinakis/sentry,beeftornado/sentry
|
from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
Enforce requirement of email field on member invite
|
from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
# override this to ensure the field is required
email = forms.EmailField()
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
|
<commit_before>from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
<commit_msg>Enforce requirement of email field on member invite<commit_after>
|
from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
# override this to ensure the field is required
email = forms.EmailField()
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
|
from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
Enforce requirement of email field on member invitefrom __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
# override this to ensure the field is required
email = forms.EmailField()
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
|
<commit_before>from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
<commit_msg>Enforce requirement of email field on member invite<commit_after>from __future__ import absolute_import
from django import forms
from django.db import transaction, IntegrityError
from sentry.models import (
AuditLogEntry, AuditLogEntryEvent, OrganizationMember
)
class InviteOrganizationMemberForm(forms.ModelForm):
# override this to ensure the field is required
email = forms.EmailField()
class Meta:
fields = ('email', 'role')
model = OrganizationMember
def save(self, actor, organization, ip_address):
om = super(InviteOrganizationMemberForm, self).save(commit=False)
om.organization = organization
try:
existing = OrganizationMember.objects.filter(
organization=organization,
user__email__iexact=om.email,
)[0]
except IndexError:
pass
else:
return existing, False
sid = transaction.savepoint(using='default')
try:
om.save()
except IntegrityError:
transaction.savepoint_rollback(sid, using='default')
return OrganizationMember.objects.get(
email__iexact=om.email,
organization=organization,
), False
transaction.savepoint_commit(sid, using='default')
AuditLogEntry.objects.create(
organization=organization,
actor=actor,
ip_address=ip_address,
target_object=om.id,
event=AuditLogEntryEvent.MEMBER_INVITE,
data=om.get_audit_log_data(),
)
om.send_invite_email()
return om, True
|
c40cb3410944053c18abf8fb2b23a59f4b336015
|
conversion_calls.py
|
conversion_calls.py
|
from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
|
from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
Parameters
----------
index : int
Index of conversion to be used.
Incorrect index will use default (all conversions).
Returns
-------
list of tuples
List of conversion detail tuples.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
|
Expand docstring for get conversions.
|
Expand docstring for get conversions.
Add parameter and return value descriptions.
|
Python
|
mit
|
AustralianAntarcticDataCentre/metadata_xml_convert,AustralianAntarcticDataCentre/metadata_xml_convert
|
from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
Expand docstring for get conversions.
Add parameter and return value descriptions.
|
from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
Parameters
----------
index : int
Index of conversion to be used.
Incorrect index will use default (all conversions).
Returns
-------
list of tuples
List of conversion detail tuples.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
|
<commit_before>from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
<commit_msg>Expand docstring for get conversions.
Add parameter and return value descriptions.<commit_after>
|
from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
Parameters
----------
index : int
Index of conversion to be used.
Incorrect index will use default (all conversions).
Returns
-------
list of tuples
List of conversion detail tuples.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
|
from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
Expand docstring for get conversions.
Add parameter and return value descriptions.from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
Parameters
----------
index : int
Index of conversion to be used.
Incorrect index will use default (all conversions).
Returns
-------
list of tuples
List of conversion detail tuples.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
|
<commit_before>from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
<commit_msg>Expand docstring for get conversions.
Add parameter and return value descriptions.<commit_after>from settings import CONVERSIONS
def get_conversions(index):
"""
Get the list of conversions to be performed.
Defaults to doing all XSL conversions for all the files.
Parameters
----------
index : int
Index of conversion to be used.
Incorrect index will use default (all conversions).
Returns
-------
list of tuples
List of conversion detail tuples.
"""
if 0 <= index and index < len(CONVERSIONS):
return [CONVERSIONS[index],]
# Default to all conversions.
return CONVERSIONS
def get_msxsl_call(input_file, transform_file, output_file):
return ('msxsl.exe', input_file, transform_file, '-o', output_file)
def get_saxon_call(input_file, transform_file, output_file):
return (
'java',
'-jar',
'saxon9.jar',
'-s:' + input_file,
'-xsl:' + transform_file,
'-o:' + output_file
)
|
0f446d166818ec6b218b59751a1dce80842ce677
|
app/auth/views.py
|
app/auth/views.py
|
# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'status': 200,
'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
|
# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
|
Remove code field from API /auth/test response
|
Remove code field from API /auth/test response
|
Python
|
agpl-3.0
|
uzh/msregistry
|
# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'status': 200,
'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
Remove code field from API /auth/test response
|
# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
|
<commit_before># Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'status': 200,
'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
<commit_msg>Remove code field from API /auth/test response<commit_after>
|
# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
|
# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'status': 200,
'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
Remove code field from API /auth/test response# Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
|
<commit_before># Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'status': 200,
'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
<commit_msg>Remove code field from API /auth/test response<commit_after># Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <filippo.panessa@uzh.ch>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
from flask import jsonify
from . import auth
from app.auth.decorators import requires_auth
@auth.route('/test')
@requires_auth
def authTest():
return jsonify({'code': 'authorization_success',
'description': "All good. You only get this message if you're authenticated."
})
|
df9691aecf19d31eab1f52f7d735ed746877ffac
|
dache/__init__.py
|
dache/__init__.py
|
from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, basestring):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
|
import six
from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, six.string_types):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
|
Fix Python 3 string type checking
|
Fix Python 3 string type checking
|
Python
|
bsd-3-clause
|
eliangcs/dache
|
from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, basestring):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
Fix Python 3 string type checking
|
import six
from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, six.string_types):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
|
<commit_before>from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, basestring):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
<commit_msg>Fix Python 3 string type checking<commit_after>
|
import six
from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, six.string_types):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
|
from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, basestring):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
Fix Python 3 string type checkingimport six
from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, six.string_types):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
|
<commit_before>from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, basestring):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
<commit_msg>Fix Python 3 string type checking<commit_after>import six
from six.moves.urllib.parse import urlparse
from dache.backends.base import CacheKeyWarning # noqa
from dache.backends.filebased import FileBasedCache
from dache.backends.locmem import LocMemCache
from dache.backends.redis import RedisCache
from dache.utils.module_loading import import_string
__version__ = '0.0.1'
__all__ = ('register_backend', 'Cache', 'CacheKeyWarning')
_BACKENDS = {
'file': FileBasedCache,
'locmem': LocMemCache,
'redis': RedisCache,
}
def register_backend(url_scheme, backend_class):
"""Register a cache backend."""
_BACKENDS[url_scheme] = backend_class
class Cache(object):
def __init__(self, url, **options):
# Create cache backend
result = urlparse(url)
backend_class = _BACKENDS[result.scheme]
if isinstance(backend_class, six.string_types):
backend_class = import_string(backend_class)
self._backend = backend_class(result, **options)
public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key',
'incr', 'decr', 'set_many', 'delete_many', 'clear',
'validate_key', 'incr_version', 'decr_version',
'close')
for method in public_methods:
setattr(self, method, getattr(self._backend, method))
def __contains__(self, item):
return item in self._backend
|
0bcdde64aeee1ddc7ae40d6aca8729a4070c604a
|
fabfile.py
|
fabfile.py
|
#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
    # Run the project's test suite at most once per fab invocation
    # (@runs_once) and return the process exit code so deploy tasks can
    # abort on failure.  capture=False streams test output to the console.
    return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
|
#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
def reset():
    """Erase the application's graph database.

    Destructive: delegates to the app's ``db.reset()``, which wipes
    stored data.  Intended for development/deploy housekeeping.
    """
    # Imported lazily so loading the fabfile does not require the
    # application package to be importable.
    import trinity
    app = trinity.Trinity()
    app.db.reset()
|
Add reset method for erasing graph database.
|
Add reset method for erasing graph database.
|
Python
|
mit
|
peplin/trinity
|
#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
Add reset method for erasing graph database.
|
#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
def reset():
import trinity
app = trinity.Trinity()
app.db.reset()
|
<commit_before>#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
<commit_msg>Add reset method for erasing graph database.<commit_after>
|
#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
def reset():
import trinity
app = trinity.Trinity()
app.db.reset()
|
#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
Add reset method for erasing graph database.#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
def reset():
import trinity
app = trinity.Trinity()
app.db.reset()
|
<commit_before>#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
<commit_msg>Add reset method for erasing graph database.<commit_after>#!/usr/bin/env python
import os
from fabric.api import *
from fab_shared import (test, webpy_deploy as deploy,
setup, development, production, localhost, staging, restart_webserver,
rollback, lint, enable, disable, maintenancemode, rechef)
env.unit = "trinity"
env.path = "/var/tornado/%(unit)s" % env
env.scm = "git@github.com:bueda/%(unit)s.git" % env
env.scm_http_url = "http://github.com/bueda/%(unit)s" % env
env.root_dir = os.path.abspath(os.path.dirname(__file__))
env.pip_requirements = ["requirements/common.txt",]
env.pip_requirements_dev = ["requirements/dev.txt",]
env.pip_requirements_production = ["requirements/production.txt",]
env.campfire_subdomain = 'bueda'
env.campfire_room = 'Development'
env.campfire_token = '63768eee94d96b7b18e2091f3919b2a2a3dcd12a'
@runs_once
def tornado_test_runner(deployment_type=None):
return local('test/run_tests.py', capture=False).return_code
env.test_runner = tornado_test_runner
def reset():
import trinity
app = trinity.Trinity()
app.db.reset()
|
1b36dd94759c41c4af433ce53e131e318d09c14a
|
tests/storage/dav/test_carddav.py
|
tests/storage/dav/test_carddav.py
|
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = (u'BEGIN:VCARD\n'
u'VERSION:3.0\n'
u'FN:Cyrus Daboo\n'
u'N:Daboo;Cyrus\n'
u'ADR;TYPE=POSTAL:;2822 Email HQ;' # address continuing
u'Suite 2821;RFCVille;PA;15213;USA\n' # on next line
u'EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com\n'
u'NICKNAME:me\n'
u'NOTE:Example VCard.\n'
u'ORG:Self Employed\n'
u'TEL;TYPE=WORK;TYPE=VOICE:412 605 0499\n'
u'TEL;TYPE=FAX:412 605 0705\n'
u'URL:http://www.example.com\n'
u'UID:{uid}\n'
u'X-SOMETHING:{r}\n'
u'END:VCARD')
|
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
# vCard 3.0 fixture; the shared test suite fills in the {uid} and {r}
# placeholders to make each generated item unique.
VCARD_TEMPLATE = u'''BEGIN:VCARD
VERSION:3.0
FN:Cyrus Daboo
N:Daboo;Cyrus
ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA
EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com
NICKNAME:me
NOTE:Example VCard.
ORG:Self Employed
TEL;TYPE=WORK;TYPE=VOICE:412 605 0499
TEL;TYPE=FAX:412 605 0705
URL:http://www.example.com
UID:{uid}
X-SOMETHING:{r}
END:VCARD'''
class TestCarddavStorage(DavStorageTests):
    """Run the shared DAV storage test suite against CarddavStorage."""
    storage_class = CarddavStorage
    item_template = VCARD_TEMPLATE
|
Move vcard template to real multi-line string
|
Move vcard template to real multi-line string
|
Python
|
mit
|
tribut/vdirsyncer,untitaker/vdirsyncer,credativUK/vdirsyncer,mathstuf/vdirsyncer,hobarrera/vdirsyncer,hobarrera/vdirsyncer,untitaker/vdirsyncer,untitaker/vdirsyncer,credativUK/vdirsyncer,tribut/vdirsyncer,mathstuf/vdirsyncer
|
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = (u'BEGIN:VCARD\n'
u'VERSION:3.0\n'
u'FN:Cyrus Daboo\n'
u'N:Daboo;Cyrus\n'
u'ADR;TYPE=POSTAL:;2822 Email HQ;' # address continuing
u'Suite 2821;RFCVille;PA;15213;USA\n' # on next line
u'EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com\n'
u'NICKNAME:me\n'
u'NOTE:Example VCard.\n'
u'ORG:Self Employed\n'
u'TEL;TYPE=WORK;TYPE=VOICE:412 605 0499\n'
u'TEL;TYPE=FAX:412 605 0705\n'
u'URL:http://www.example.com\n'
u'UID:{uid}\n'
u'X-SOMETHING:{r}\n'
u'END:VCARD')
Move vcard template to real multi-line string
|
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
VCARD_TEMPLATE = u'''BEGIN:VCARD
VERSION:3.0
FN:Cyrus Daboo
N:Daboo;Cyrus
ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA
EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com
NICKNAME:me
NOTE:Example VCard.
ORG:Self Employed
TEL;TYPE=WORK;TYPE=VOICE:412 605 0499
TEL;TYPE=FAX:412 605 0705
URL:http://www.example.com
UID:{uid}
X-SOMETHING:{r}
END:VCARD'''
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = VCARD_TEMPLATE
|
<commit_before>
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = (u'BEGIN:VCARD\n'
u'VERSION:3.0\n'
u'FN:Cyrus Daboo\n'
u'N:Daboo;Cyrus\n'
u'ADR;TYPE=POSTAL:;2822 Email HQ;' # address continuing
u'Suite 2821;RFCVille;PA;15213;USA\n' # on next line
u'EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com\n'
u'NICKNAME:me\n'
u'NOTE:Example VCard.\n'
u'ORG:Self Employed\n'
u'TEL;TYPE=WORK;TYPE=VOICE:412 605 0499\n'
u'TEL;TYPE=FAX:412 605 0705\n'
u'URL:http://www.example.com\n'
u'UID:{uid}\n'
u'X-SOMETHING:{r}\n'
u'END:VCARD')
<commit_msg>Move vcard template to real multi-line string<commit_after>
|
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
VCARD_TEMPLATE = u'''BEGIN:VCARD
VERSION:3.0
FN:Cyrus Daboo
N:Daboo;Cyrus
ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA
EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com
NICKNAME:me
NOTE:Example VCard.
ORG:Self Employed
TEL;TYPE=WORK;TYPE=VOICE:412 605 0499
TEL;TYPE=FAX:412 605 0705
URL:http://www.example.com
UID:{uid}
X-SOMETHING:{r}
END:VCARD'''
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = VCARD_TEMPLATE
|
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = (u'BEGIN:VCARD\n'
u'VERSION:3.0\n'
u'FN:Cyrus Daboo\n'
u'N:Daboo;Cyrus\n'
u'ADR;TYPE=POSTAL:;2822 Email HQ;' # address continuing
u'Suite 2821;RFCVille;PA;15213;USA\n' # on next line
u'EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com\n'
u'NICKNAME:me\n'
u'NOTE:Example VCard.\n'
u'ORG:Self Employed\n'
u'TEL;TYPE=WORK;TYPE=VOICE:412 605 0499\n'
u'TEL;TYPE=FAX:412 605 0705\n'
u'URL:http://www.example.com\n'
u'UID:{uid}\n'
u'X-SOMETHING:{r}\n'
u'END:VCARD')
Move vcard template to real multi-line string
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
VCARD_TEMPLATE = u'''BEGIN:VCARD
VERSION:3.0
FN:Cyrus Daboo
N:Daboo;Cyrus
ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA
EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com
NICKNAME:me
NOTE:Example VCard.
ORG:Self Employed
TEL;TYPE=WORK;TYPE=VOICE:412 605 0499
TEL;TYPE=FAX:412 605 0705
URL:http://www.example.com
UID:{uid}
X-SOMETHING:{r}
END:VCARD'''
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = VCARD_TEMPLATE
|
<commit_before>
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = (u'BEGIN:VCARD\n'
u'VERSION:3.0\n'
u'FN:Cyrus Daboo\n'
u'N:Daboo;Cyrus\n'
u'ADR;TYPE=POSTAL:;2822 Email HQ;' # address continuing
u'Suite 2821;RFCVille;PA;15213;USA\n' # on next line
u'EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com\n'
u'NICKNAME:me\n'
u'NOTE:Example VCard.\n'
u'ORG:Self Employed\n'
u'TEL;TYPE=WORK;TYPE=VOICE:412 605 0499\n'
u'TEL;TYPE=FAX:412 605 0705\n'
u'URL:http://www.example.com\n'
u'UID:{uid}\n'
u'X-SOMETHING:{r}\n'
u'END:VCARD')
<commit_msg>Move vcard template to real multi-line string<commit_after>
# -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.test_carddav
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
from vdirsyncer.storage.dav.carddav import CarddavStorage
from . import DavStorageTests
VCARD_TEMPLATE = u'''BEGIN:VCARD
VERSION:3.0
FN:Cyrus Daboo
N:Daboo;Cyrus
ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA
EMAIL;TYPE=INTERNET;TYPE=PREF:cyrus@example.com
NICKNAME:me
NOTE:Example VCard.
ORG:Self Employed
TEL;TYPE=WORK;TYPE=VOICE:412 605 0499
TEL;TYPE=FAX:412 605 0705
URL:http://www.example.com
UID:{uid}
X-SOMETHING:{r}
END:VCARD'''
class TestCarddavStorage(DavStorageTests):
storage_class = CarddavStorage
item_template = VCARD_TEMPLATE
|
8ca4babf48425efafb3c6229f5db0cec9715ab97
|
example/tests/test_views.py
|
example/tests/test_views.py
|
from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content)
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content)
self.assertEqual(data['count'], 0)
|
from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
    """Exercise the shop's product-select autocomplete endpoint."""

    def setUp(self):
        maker = Manufacturer.objects.create(name="testmanufacturer")
        Product.objects.create(product_name="testproduct1", order=1,
                               manufacturer=maker)

    def test_finds_product_case_insensitive(self):
        url = reverse('shop:select-product') + "?term=Prod"
        payload = json.loads(self.client.get(url).content.decode("utf-8"))
        self.assertEqual(payload['count'], 1)
        self.assertEqual(payload['results'][0]['text'], "testproduct1")

    def test_bogus_query_finds_nothing(self):
        url = reverse('shop:select-product') + "?term=whatever"
        payload = json.loads(self.client.get(url).content.decode("utf-8"))
        self.assertEqual(payload['count'], 0)
|
Fix test for Python 3
|
Fix test for Python 3
|
Python
|
bsd-3-clause
|
khchine5/django-shop,nimbis/django-shop,nimbis/django-shop,jrief/django-shop,divio/django-shop,khchine5/django-shop,khchine5/django-shop,jrief/django-shop,divio/django-shop,divio/django-shop,khchine5/django-shop,awesto/django-shop,nimbis/django-shop,jrief/django-shop,nimbis/django-shop,awesto/django-shop,awesto/django-shop,jrief/django-shop
|
from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content)
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content)
self.assertEqual(data['count'], 0)
Fix test for Python 3
|
from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['count'], 0)
|
<commit_before>from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content)
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content)
self.assertEqual(data['count'], 0)
<commit_msg>Fix test for Python 3<commit_after>
|
from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['count'], 0)
|
from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content)
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content)
self.assertEqual(data['count'], 0)
Fix test for Python 3from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['count'], 0)
|
<commit_before>from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content)
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content)
self.assertEqual(data['count'], 0)
<commit_msg>Fix test for Python 3<commit_after>from django.core.urlresolvers import reverse
from django.test import TestCase
import json
from myshop.models.polymorphic.product import Product
from myshop.models.manufacturer import Manufacturer
class ProductSelectViewTest(TestCase):
def setUp(self):
manufacturer = Manufacturer.objects.create(name="testmanufacturer")
Product.objects.create(product_name="testproduct1", order=1, manufacturer=manufacturer)
def test_finds_product_case_insensitive(self):
response = self.client.get(reverse('shop:select-product') + "?term=Prod")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['count'], 1)
self.assertEqual(data['results'][0]['text'], "testproduct1")
def test_bogus_query_finds_nothing(self):
response = self.client.get(reverse('shop:select-product') + "?term=whatever")
data = json.loads(response.content.decode("utf-8"))
self.assertEqual(data['count'], 0)
|
e72ab305e2a90433c07300f37f7ae6fa2901b9cc
|
app/auth/views.py
|
app/auth/views.py
|
# -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from .. import db
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
|
# -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
user.save()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
|
Use newly added save on new users.
|
Use newly added save on new users.
|
Python
|
mit
|
guillaumededrie/flask-todolist,poulp/flask-todolist,guillaumededrie/flask-todolist,rtzll/flask-todolist,0xfoo/flask-todolist,polyfunc/flask-todolist,poulp/flask-todolist,rtzll/flask-todolist,polyfunc/flask-todolist,guillaumededrie/flask-todolist,0xfoo/flask-todolist,poulp/flask-todolist,0xfoo/flask-todolist,polyfunc/flask-todolist,rtzll/flask-todolist,rtzll/flask-todolist
|
# -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from .. import db
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
Use newly added save on new users.
|
# -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
user.save()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
|
<commit_before># -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from .. import db
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
<commit_msg>Use newly added save on new users.<commit_after>
|
# -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
user.save()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
|
# -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from .. import db
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
Use newly added save on new users.# -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
user.save()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
|
<commit_before># -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from .. import db
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
<commit_msg>Use newly added save on new users.<commit_after># -*- coding: utf-8 -*-
from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .forms import LoginForm, RegistrationForm
from ..models import User
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None:
login_user(user)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('login.html', form=form)
@auth.route('/logout')
# @login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
user.save()
flash('You successfully registered. Welcome!')
return redirect(url_for('auth.login'))
return render_template('register.html', form=form)
|
9bf1f19eefc48dbced4b6ea1cc5258518d14bceb
|
app/utils/http.py
|
app/utils/http.py
|
import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ")
logger.error(f"Invalid response from {url}: {message}")
return False
|
import asyncio
import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
asyncio.TimeoutError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url, timeout=10) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ") or e.__class__.__name__
logger.error(f"Invalid response from {url}: {message}")
return False
|
Add timeout to downloading custom background images
|
Add timeout to downloading custom background images
|
Python
|
mit
|
jacebrowning/memegen,jacebrowning/memegen
|
import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ")
logger.error(f"Invalid response from {url}: {message}")
return False
Add timeout to downloading custom background images
|
import asyncio
import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
asyncio.TimeoutError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url, timeout=10) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ") or e.__class__.__name__
logger.error(f"Invalid response from {url}: {message}")
return False
|
<commit_before>import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ")
logger.error(f"Invalid response from {url}: {message}")
return False
<commit_msg>Add timeout to downloading custom background images<commit_after>
|
import asyncio
import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
asyncio.TimeoutError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url, timeout=10) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ") or e.__class__.__name__
logger.error(f"Invalid response from {url}: {message}")
return False
|
import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ")
logger.error(f"Invalid response from {url}: {message}")
return False
Add timeout to downloading custom background imagesimport asyncio
import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
asyncio.TimeoutError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url, timeout=10) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ") or e.__class__.__name__
logger.error(f"Invalid response from {url}: {message}")
return False
|
<commit_before>import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ")
logger.error(f"Invalid response from {url}: {message}")
return False
<commit_msg>Add timeout to downloading custom background images<commit_after>import asyncio
import aiofiles
import aiohttp
import aiohttp.client_exceptions
from aiopath import AsyncPath
from sanic.log import logger
EXCEPTIONS = (
aiohttp.client_exceptions.ClientConnectionError,
aiohttp.client_exceptions.InvalidURL,
aiohttp.client_exceptions.TooManyRedirects,
AssertionError,
asyncio.TimeoutError,
UnicodeError,
)
async def download(url: str, path: AsyncPath) -> bool:
async with aiohttp.ClientSession() as session:
try:
async with session.get(url, timeout=10) as response:
if response.status == 200:
f = await aiofiles.open(path, mode="wb") # type: ignore
await f.write(await response.read())
await f.close()
return True
logger.error(f"{response.status} response from {url}")
except EXCEPTIONS as e:
message = str(e).strip("() ") or e.__class__.__name__
logger.error(f"Invalid response from {url}: {message}")
return False
|
07367ced88bd68666e4460d2734c6c18069573a3
|
django_field_cryptography/fields.py
|
django_field_cryptography/fields.py
|
from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
|
from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
"""to_python is called every time an instance of the field is
assigned a value and when retrieving the value from the database."""
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
|
Add doctring for `to_python` [ci-skip].
|
Add doctring for `to_python` [ci-skip].
`to_python` is called when assigning and retrieving a value from the
database.
|
Python
|
bsd-2-clause
|
incuna/django-field-cryptography,tombooth/django-field-cryptography
|
from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
Add doctring for `to_python` [ci-skip].
`to_python` is called when assigning and retrieving a value from the
database.
|
from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
"""to_python is called every time an instance of the field is
assigned a value and when retrieving the value from the database."""
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
|
<commit_before>from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
<commit_msg>Add doctring for `to_python` [ci-skip].
`to_python` is called when assigning and retrieving a value from the
database.<commit_after>
|
from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
"""to_python is called every time an instance of the field is
assigned a value and when retrieving the value from the database."""
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
|
from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
Add doctring for `to_python` [ci-skip].
`to_python` is called when assigning and retrieving a value from the
database.from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
"""to_python is called every time an instance of the field is
assigned a value and when retrieving the value from the database."""
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
|
<commit_before>from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
<commit_msg>Add doctring for `to_python` [ci-skip].
`to_python` is called when assigning and retrieving a value from the
database.<commit_after>from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
EncryptedTextField rely on `Fernet` from `cryptography` to ensure symetric
encryption. This field is compatible with South migrations.
"""
def db_type(self, connection):
return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
"""to_python is called every time an instance of the field is
assigned a value and when retrieving the value from the database."""
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
|
0f7af860d8df01d1c614b20d687ff6d0393d6938
|
docker/transport/basehttpadapter.py
|
docker/transport/basehttpadapter.py
|
import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
self.pools.clear()
|
import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
super(BaseHTTPAdapter, self).close()
if hasattr(self, 'pools'):
self.pools.clear()
|
Fix BaseHTTPAdapter for the SSL case
|
Fix BaseHTTPAdapter for the SSL case
Signed-off-by: Ulysses Souza <a0ff1337c6a0e43e9559f5f67fc3acb852912071@docker.com>
|
Python
|
apache-2.0
|
docker/docker-py,funkyfuture/docker-py,docker/docker-py,funkyfuture/docker-py,vdemeester/docker-py,vdemeester/docker-py
|
import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
self.pools.clear()
Fix BaseHTTPAdapter for the SSL case
Signed-off-by: Ulysses Souza <a0ff1337c6a0e43e9559f5f67fc3acb852912071@docker.com>
|
import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
super(BaseHTTPAdapter, self).close()
if hasattr(self, 'pools'):
self.pools.clear()
|
<commit_before>import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
self.pools.clear()
<commit_msg>Fix BaseHTTPAdapter for the SSL case
Signed-off-by: Ulysses Souza <a0ff1337c6a0e43e9559f5f67fc3acb852912071@docker.com><commit_after>
|
import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
super(BaseHTTPAdapter, self).close()
if hasattr(self, 'pools'):
self.pools.clear()
|
import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
self.pools.clear()
Fix BaseHTTPAdapter for the SSL case
Signed-off-by: Ulysses Souza <a0ff1337c6a0e43e9559f5f67fc3acb852912071@docker.com>import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
super(BaseHTTPAdapter, self).close()
if hasattr(self, 'pools'):
self.pools.clear()
|
<commit_before>import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
self.pools.clear()
<commit_msg>Fix BaseHTTPAdapter for the SSL case
Signed-off-by: Ulysses Souza <a0ff1337c6a0e43e9559f5f67fc3acb852912071@docker.com><commit_after>import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
super(BaseHTTPAdapter, self).close()
if hasattr(self, 'pools'):
self.pools.clear()
|
2cb10055b34972644d705bb07f80a0d40ac85002
|
vk_channelify/models/channel.py
|
vk_channelify/models/channel.py
|
from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0')
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
|
from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0', default=0)
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
|
Fix error 'unorderable types: int() > NoneType()'
|
Fix error 'unorderable types: int() > NoneType()'
|
Python
|
mit
|
reo7sp/vk-channelify,reo7sp/vk-channelify
|
from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0')
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
Fix error 'unorderable types: int() > NoneType()'
|
from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0', default=0)
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
|
<commit_before>from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0')
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
<commit_msg>Fix error 'unorderable types: int() > NoneType()'<commit_after>
|
from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0', default=0)
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
|
from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0')
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
Fix error 'unorderable types: int() > NoneType()'from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0', default=0)
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
|
<commit_before>from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0')
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
<commit_msg>Fix error 'unorderable types: int() > NoneType()'<commit_after>from sqlalchemy import Column, String, Integer
from . import Base
class Channel(Base):
__tablename__ = 'channels'
channel_id = Column(String, primary_key=True, nullable=False)
vk_group_id = Column(String, nullable=False)
last_vk_post_id = Column(Integer, nullable=False, server_default='0', default=0)
owner_id = Column(String, nullable=False)
owner_username = Column(String)
hashtag_filter = Column(String)
|
eabc792a4ed87900ae1cb6a9404c3f85874cd053
|
avwx_api/views.py
|
avwx_api/views.py
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'})
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'})
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'}), 400
|
Return 400 status for incomplete API queries
|
Return 400 status for incomplete API queries
|
Python
|
mit
|
flyinactor91/AVWX-API,flyinactor91/AVWX-API,flyinactor91/AVWX-API
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'})
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'})
Return 400 status for incomplete API queries
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'}), 400
|
<commit_before>"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'})
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'})
<commit_msg>Return 400 status for incomplete API queries<commit_after>
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'}), 400
|
"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'})
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'})
Return 400 status for incomplete API queries"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'}), 400
|
<commit_before>"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'})
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'})
<commit_msg>Return 400 status for incomplete API queries<commit_after>"""
Michael duPont - michael@mdupont.com
avwx_api.views - Routes and views for the flask application
"""
# pylint: disable=W0702
# library
from flask import jsonify
# module
from avwx_api import app
##-------------------------------------------------------##
# Static Web Pages
@app.route('/')
@app.route('/home')
def home():
"""Returns static home page"""
return app.send_static_file('html/home.html')
@app.route('/about')
def about():
"""Returns static about page"""
return app.send_static_file('html/about.html')
@app.route('/contact')
def contact():
"""Returns static contact page"""
return app.send_static_file('html/contact.html')
@app.route('/documentation')
def documentation():
"""Returns static documentation page"""
return app.send_static_file('html/documentation.html')
@app.route('/updates')
def updates():
"""Returns static updates page"""
return app.send_static_file('html/updates.html')
##-------------------------------------------------------##
# API Routing Errors
@app.route('/api')
def no_report():
"""Returns no report msg"""
return jsonify({'Error': 'No report type given'}), 400
@app.route('/api/metar')
@app.route('/api/taf')
def no_station():
"""Returns no station msg"""
return jsonify({'Error': 'No station given'}), 400
|
16b3c9680f44722cf2544bdab581f9505666aef0
|
ds_utils/pandas.py
|
ds_utils/pandas.py
|
import numpy as np
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return size
|
import numpy as np
import pandas as pd
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return size
def print_full(df):
"""Print the full DataFrame
:param df: pandas.DataFrame
:return None
See:
- http://stackoverflow.com/questions/19124601/is-there-a-way-to-pretty-print-the-entire-pandas-series-dataframe
"""
with pd.option_context('display.max_rows', len(df), 'display.max_columns', len(df.shape[1])):
print df
|
Add function for printing full DataFrame
|
Add function for printing full DataFrame
|
Python
|
mit
|
hgrif/ds-utils
|
import numpy as np
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return sizeAdd function for printing full DataFrame
|
import numpy as np
import pandas as pd
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return size
def print_full(df):
"""Print the full DataFrame
:param df: pandas.DataFrame
:return None
See:
- http://stackoverflow.com/questions/19124601/is-there-a-way-to-pretty-print-the-entire-pandas-series-dataframe
"""
with pd.option_context('display.max_rows', len(df), 'display.max_columns', len(df.shape[1])):
print df
|
<commit_before>import numpy as np
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return size<commit_msg>Add function for printing full DataFrame<commit_after>
|
import numpy as np
import pandas as pd
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return size
def print_full(df):
"""Print the full DataFrame
:param df: pandas.DataFrame
:return None
See:
- http://stackoverflow.com/questions/19124601/is-there-a-way-to-pretty-print-the-entire-pandas-series-dataframe
"""
with pd.option_context('display.max_rows', len(df), 'display.max_columns', len(df.shape[1])):
print df
|
import numpy as np
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return sizeAdd function for printing full DataFrameimport numpy as np
import pandas as pd
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return size
def print_full(df):
"""Print the full DataFrame
:param df: pandas.DataFrame
:return None
See:
- http://stackoverflow.com/questions/19124601/is-there-a-way-to-pretty-print-the-entire-pandas-series-dataframe
"""
with pd.option_context('display.max_rows', len(df), 'display.max_columns', len(df.shape[1])):
print df
|
<commit_before>import numpy as np
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return size<commit_msg>Add function for printing full DataFrame<commit_after>import numpy as np
import pandas as pd
def dataframe_size(df):
"""Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_to_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_to_megabyte_factor))
size['total'] = np.sum(size.values())
return size
def print_full(df):
"""Print the full DataFrame
:param df: pandas.DataFrame
:return None
See:
- http://stackoverflow.com/questions/19124601/is-there-a-way-to-pretty-print-the-entire-pandas-series-dataframe
"""
with pd.option_context('display.max_rows', len(df), 'display.max_columns', len(df.shape[1])):
print df
|
75ed1685632471ba6b63c3d9d050933e1c06e3d8
|
exporters/writers/console_writer.py
|
exporters/writers/console_writer.py
|
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
def write_batch(self, batch):
for item in batch:
print item.formatted
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')
|
import json
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
self.pretty_print = self.options.get('pretty_print', False)
def write_batch(self, batch):
for item in batch:
formatted_item = item.formatted
if self.pretty_print:
formatted_item = self._format(formatted_item)
print formatted_item
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')
def _format(self, item):
try:
return json.dumps(json.loads(item), indent=2)
except:
return item
|
Add 'pretty_print' option to the ConsoleWritter
|
Add 'pretty_print' option to the ConsoleWritter
Simply tries to use the json module to print the item as a formated
json.
|
Python
|
bsd-3-clause
|
scrapinghub/exporters
|
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
def write_batch(self, batch):
for item in batch:
print item.formatted
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')Add 'pretty_print' option to the ConsoleWritter
Simply tries to use the json module to print the item as a formated
json.
|
import json
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
self.pretty_print = self.options.get('pretty_print', False)
def write_batch(self, batch):
for item in batch:
formatted_item = item.formatted
if self.pretty_print:
formatted_item = self._format(formatted_item)
print formatted_item
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')
def _format(self, item):
try:
return json.dumps(json.loads(item), indent=2)
except:
return item
|
<commit_before>from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
def write_batch(self, batch):
for item in batch:
print item.formatted
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')<commit_msg>Add 'pretty_print' option to the ConsoleWritter
Simply tries to use the json module to print the item as a formated
json.<commit_after>
|
import json
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
self.pretty_print = self.options.get('pretty_print', False)
def write_batch(self, batch):
for item in batch:
formatted_item = item.formatted
if self.pretty_print:
formatted_item = self._format(formatted_item)
print formatted_item
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')
def _format(self, item):
try:
return json.dumps(json.loads(item), indent=2)
except:
return item
|
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
def write_batch(self, batch):
for item in batch:
print item.formatted
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')Add 'pretty_print' option to the ConsoleWritter
Simply tries to use the json module to print the item as a formated
json.import json
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
self.pretty_print = self.options.get('pretty_print', False)
def write_batch(self, batch):
for item in batch:
formatted_item = item.formatted
if self.pretty_print:
formatted_item = self._format(formatted_item)
print formatted_item
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')
def _format(self, item):
try:
return json.dumps(json.loads(item), indent=2)
except:
return item
|
<commit_before>from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
def write_batch(self, batch):
for item in batch:
print item.formatted
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')<commit_msg>Add 'pretty_print' option to the ConsoleWritter
Simply tries to use the json module to print the item as a formated
json.<commit_after>import json
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
self.pretty_print = self.options.get('pretty_print', False)
def write_batch(self, batch):
for item in batch:
formatted_item = item.formatted
if self.pretty_print:
formatted_item = self._format(formatted_item)
print formatted_item
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')
def _format(self, item):
try:
return json.dumps(json.loads(item), indent=2)
except:
return item
|
b21327ab07451dd83eec0a17ee84a6e9d19f16c9
|
folivora/utils/notifications.py
|
folivora/utils/notifications.py
|
# -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
emails = log.project.members.values_list('email', flat=True)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, emails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
|
# -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
mails = []
members = log.project.projectmember_set.all()
for member in members:
mails.append(member.mail or member.user.email)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, mails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
|
Use project member email if given
|
Use project member email if given
|
Python
|
isc
|
rocketDuck/folivora,rocketDuck/folivora,rocketDuck/folivora
|
# -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
emails = log.project.members.values_list('email', flat=True)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, emails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
Use project member email if given
|
# -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
mails = []
members = log.project.projectmember_set.all()
for member in members:
mails.append(member.mail or member.user.email)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, mails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
|
<commit_before># -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
emails = log.project.members.values_list('email', flat=True)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, emails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
<commit_msg>Use project member email if given<commit_after>
|
# -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
mails = []
members = log.project.projectmember_set.all()
for member in members:
mails.append(member.mail or member.user.email)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, mails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
|
# -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
emails = log.project.members.values_list('email', flat=True)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, emails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
Use project member email if given# -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
mails = []
members = log.project.projectmember_set.all()
for member in members:
mails.append(member.mail or member.user.email)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, mails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
|
<commit_before># -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
emails = log.project.members.values_list('email', flat=True)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, emails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
<commit_msg>Use project member email if given<commit_after># -*- coding: utf-8 -*-
"""
folivora.utils.notification
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Framework for user/project notifications.
"""
from django.conf import settings
from django.template import loader
from django.core.mail import send_mail
def route_notifications(*log_entries):
for entry in log_entries:
if entry.action in ACTION_MAPPING:
ACTION_MAPPING[entry.action](entry)
def send_update_avilable_notification(log):
message = loader.render_to_string('notifications/update_available.mail.txt',
{'log': log})
subject = '{prefix}New update available for project "{project}"'.format(**{
'prefix': settings.EMAIL_SUBJECT_PREFIX,
'project': log.project.name})
mails = []
members = log.project.projectmember_set.all()
for member in members:
mails.append(member.mail or member.user.email)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, mails,
fail_silently=False)
ACTION_MAPPING = {
'update_available': send_update_avilable_notification
}
|
1b6e3a4b51f0a85e63715984e9b670096d369db7
|
examples/threads.py
|
examples/threads.py
|
import guv
guv.monkey_patch()
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
if __name__ == '__main__':
main()
|
import guv
guv.monkey_patch()
from guv import gyield, sleep
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
print('t: 1')
gyield()
print('t: 2')
gyield()
print('t: 3')
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
print('m: 1')
gyield()
print('m: 2')
gyield()
print('m: 3')
if __name__ == '__main__':
main()
|
Add calls to gyield() to ensure that we're in the same OS thread
|
Add calls to gyield() to ensure that we're in the same OS thread
|
Python
|
mit
|
veegee/guv,veegee/guv
|
import guv
guv.monkey_patch()
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
if __name__ == '__main__':
main()
Add calls to gyield() to ensure that we're in the same OS thread
|
import guv
guv.monkey_patch()
from guv import gyield, sleep
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
print('t: 1')
gyield()
print('t: 2')
gyield()
print('t: 3')
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
print('m: 1')
gyield()
print('m: 2')
gyield()
print('m: 3')
if __name__ == '__main__':
main()
|
<commit_before>import guv
guv.monkey_patch()
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
if __name__ == '__main__':
main()
<commit_msg>Add calls to gyield() to ensure that we're in the same OS thread<commit_after>
|
import guv
guv.monkey_patch()
from guv import gyield, sleep
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
print('t: 1')
gyield()
print('t: 2')
gyield()
print('t: 3')
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
print('m: 1')
gyield()
print('m: 2')
gyield()
print('m: 3')
if __name__ == '__main__':
main()
|
import guv
guv.monkey_patch()
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
if __name__ == '__main__':
main()
Add calls to gyield() to ensure that we're in the same OS threadimport guv
guv.monkey_patch()
from guv import gyield, sleep
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
print('t: 1')
gyield()
print('t: 2')
gyield()
print('t: 3')
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
print('m: 1')
gyield()
print('m: 2')
gyield()
print('m: 3')
if __name__ == '__main__':
main()
|
<commit_before>import guv
guv.monkey_patch()
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
if __name__ == '__main__':
main()
<commit_msg>Add calls to gyield() to ensure that we're in the same OS thread<commit_after>import guv
guv.monkey_patch()
from guv import gyield, sleep
import threading
import greenlet
greenlet_ids = {}
def debug(i):
print('{} greenlet_ids: {}'.format(i, greenlet_ids))
def f():
greenlet_ids[1] = greenlet.getcurrent()
debug(2)
print('t: 1')
gyield()
print('t: 2')
gyield()
print('t: 3')
def main():
greenlet_ids[0] = greenlet.getcurrent()
debug(1)
t = threading.Thread(target=f)
t.start()
debug(3)
print('m: 1')
gyield()
print('m: 2')
gyield()
print('m: 3')
if __name__ == '__main__':
main()
|
ee00138478726ffb60b0a9d3541bb010b95903d8
|
cea/interfaces/dashboard/api/__init__.py
|
cea/interfaces/dashboard/api/__init__.py
|
from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
|
from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
@api.errorhandler
def default_error_handler(error):
"""Default error handler"""
import traceback
trace = traceback.format_exc()
return {'message': error.message, 'trace': trace}, 500
|
Add general error handler for unhandled exceptions in api
|
Add general error handler for unhandled exceptions in api
|
Python
|
mit
|
architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst
|
from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
Add general error handler for unhandled exceptions in api
|
from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
@api.errorhandler
def default_error_handler(error):
"""Default error handler"""
import traceback
trace = traceback.format_exc()
return {'message': error.message, 'trace': trace}, 500
|
<commit_before>from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
<commit_msg>Add general error handler for unhandled exceptions in api<commit_after>
|
from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
@api.errorhandler
def default_error_handler(error):
"""Default error handler"""
import traceback
trace = traceback.format_exc()
return {'message': error.message, 'trace': trace}, 500
|
from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
Add general error handler for unhandled exceptions in apifrom flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
@api.errorhandler
def default_error_handler(error):
"""Default error handler"""
import traceback
trace = traceback.format_exc()
return {'message': error.message, 'trace': trace}, 500
|
<commit_before>from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
<commit_msg>Add general error handler for unhandled exceptions in api<commit_after>from flask import Blueprint
from flask_restplus import Api
from .tools import api as tools
from .project import api as project
from .inputs import api as inputs
from .dashboard import api as dashboard
from .glossary import api as glossary
blueprint = Blueprint('api', __name__, url_prefix='/api')
api = Api(blueprint)
api.add_namespace(tools, path='/tools')
api.add_namespace(project, path='/project')
api.add_namespace(inputs, path='/inputs')
api.add_namespace(dashboard, path='/dashboards')
api.add_namespace(glossary, path='/glossary')
@api.errorhandler
def default_error_handler(error):
"""Default error handler"""
import traceback
trace = traceback.format_exc()
return {'message': error.message, 'trace': trace}, 500
|
b3f978bef561bb10c7c897f8da2e3abae2612390
|
grappa/__init__.py
|
grappa/__init__.py
|
# -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.9'
|
# -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.10'
|
Bump version: 0.1.9 → 0.1.10
|
Bump version: 0.1.9 → 0.1.10
|
Python
|
mit
|
grappa-py/grappa
|
# -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.9'
Bump version: 0.1.9 → 0.1.10
|
# -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.10'
|
<commit_before># -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.9'
<commit_msg>Bump version: 0.1.9 → 0.1.10<commit_after>
|
# -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.10'
|
# -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.9'
Bump version: 0.1.9 → 0.1.10# -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.10'
|
<commit_before># -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.9'
<commit_msg>Bump version: 0.1.9 → 0.1.10<commit_after># -*- coding: utf-8 -*
"""
`grappa` provides two different testing styles: `should` and `expect`.
should
------
Example using ``should`` style::
from grappa import should
should('foo').be.equal.to('foo')
'foo' | should.be.equal.to('foo')
expect
------
Example using ``expect`` style::
from grappa import expect
expect([1, 2, 3]).to.contain([2, 3])
[1, 2, 3] | expect.to.contain([2, 3])
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export public API module members
from .api import * # noqa
from .api import __all__ # noqa
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.10'
|
e8d5732e94d14a3a72999bd270af1fd3f3a2e09f
|
fileutil_posix.py
|
fileutil_posix.py
|
import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("HOME", "")
if name:
path = os.path.join(path, "." + name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
|
import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("XDG_CONFIG_HOME")
if not path:
path = os.path.join(os.environ.get("HOME", "/"), ".config")
if name:
path = os.path.join(path, name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
|
Use XDG_CONFIG_HOME for configuration directory.
|
Use XDG_CONFIG_HOME for configuration directory.
|
Python
|
mit
|
shaurz/devo
|
import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("HOME", "")
if name:
path = os.path.join(path, "." + name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
Use XDG_CONFIG_HOME for configuration directory.
|
import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("XDG_CONFIG_HOME")
if not path:
path = os.path.join(os.environ.get("HOME", "/"), ".config")
if name:
path = os.path.join(path, name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
|
<commit_before>import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("HOME", "")
if name:
path = os.path.join(path, "." + name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
<commit_msg>Use XDG_CONFIG_HOME for configuration directory.<commit_after>
|
import sys, os, subprocess
def run(args, workdir=None):
    """Run *args* as a child process and return its exit status.

    ``workdir``, if given, is used as the child's working directory.
    """
    # subprocess.call is exactly Popen(...).wait(), just more compact.
    return subprocess.call(args, close_fds=True, cwd=workdir)
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
    """Open *path* with the platform's default handler; True on success."""
    status = run([shell_open_command, path], workdir=workdir)
    return status == 0
def get_user_config_dir(name=""):
path = os.environ.get("XDG_CONFIG_HOME")
if not path:
path = os.path.join(os.environ.get("HOME", "/"), ".config")
if name:
path = os.path.join(path, name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
|
import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("HOME", "")
if name:
path = os.path.join(path, "." + name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
Use XDG_CONFIG_HOME for configuration directory.import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("XDG_CONFIG_HOME")
if not path:
path = os.path.join(os.environ.get("HOME", "/"), ".config")
if name:
path = os.path.join(path, name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
|
<commit_before>import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("HOME", "")
if name:
path = os.path.join(path, "." + name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
<commit_msg>Use XDG_CONFIG_HOME for configuration directory.<commit_after>import sys, os, subprocess
def run(args, workdir=None):
p = subprocess.Popen(args, close_fds=True, cwd=workdir)
return p.wait()
if sys.platform == "darwin":
shell_open_command = "open"
else:
shell_open_command = "xdg-open"
def shell_open(path, workdir=None):
return run([shell_open_command, path], workdir=workdir) == 0
def get_user_config_dir(name=""):
path = os.environ.get("XDG_CONFIG_HOME")
if not path:
path = os.path.join(os.environ.get("HOME", "/"), ".config")
if name:
path = os.path.join(path, name)
return os.path.realpath(path)
__all__ = (
"shell_open",
"get_user_config_dir",
)
|
1f6892876d9f0a02d62de304a37c7a6e8d369a58
|
dimod/reference/samplers/random_sampler.py
|
dimod/reference/samplers/random_sampler.py
|
"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
def __init__(self):
Sampler.__init__(self)
self.sample_kwargs = {'num_reads': []}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
|
"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
properties = None
parameters = None
def __init__(self):
self.parameters = {'num_reads': []}
self.properties = {}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
|
Update RandomSampler to use the new Sampler abc
|
Update RandomSampler to use the new Sampler abc
|
Python
|
apache-2.0
|
oneklc/dimod,oneklc/dimod
|
"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
def __init__(self):
Sampler.__init__(self)
self.sample_kwargs = {'num_reads': []}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
Update RandomSampler to use the new Sampler abc
|
"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
properties = None
parameters = None
def __init__(self):
self.parameters = {'num_reads': []}
self.properties = {}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
|
<commit_before>"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
def __init__(self):
Sampler.__init__(self)
self.sample_kwargs = {'num_reads': []}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
<commit_msg>Update RandomSampler to use the new Sampler abc<commit_after>
|
"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
    """Gives random samples.

    Note that this sampler is intended for testing.
    """
    # Sampler ABC attributes; replaced by per-instance values in __init__.
    properties = None
    parameters = None

    def __init__(self):
        # Maps each accepted sample() keyword argument to the list of
        # properties relevant to it (none here).
        self.parameters = {'num_reads': []}
        self.properties = {}

    def sample(self, bqm, num_reads=10):
        """Gives random samples.

        Args:
            bqm: the binary quadratic model to sample from; its vartype
                determines the two candidate values for every variable.
            num_reads (int, optional): number of samples to draw.
                Defaults to 10.

        Returns:
            :obj:`.Response`: The vartype will match the given binary quadratic model.

        Notes:
            For each variable in each sample, the value is chosen by a coin flip.
        """
        # The two allowed variable values for this vartype, e.g. (-1, 1) or (0, 1).
        values = np.asarray(list(bqm.vartype.value), dtype='int8')
        # Uniform random matrix of shape (num_reads, num_variables).
        samples = np.random.choice(values, (num_reads, len(bqm)))
        variable_labels = list(bqm.linear)
        label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
        # Energy of each row, exposed to bqm.energy as a label->value mapping
        # via SampleView without copying the row.
        energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
        return Response.from_matrix(samples, {'energy': energies},
                                    vartype=bqm.vartype, variable_labels=variable_labels)
|
"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
def __init__(self):
Sampler.__init__(self)
self.sample_kwargs = {'num_reads': []}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
Update RandomSampler to use the new Sampler abc"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
properties = None
parameters = None
def __init__(self):
self.parameters = {'num_reads': []}
self.properties = {}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
|
<commit_before>"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
def __init__(self):
Sampler.__init__(self)
self.sample_kwargs = {'num_reads': []}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
<commit_msg>Update RandomSampler to use the new Sampler abc<commit_after>"""
RandomSampler
-------------
A random sampler that can be used for unit testing and debugging.
"""
import numpy as np
from dimod.core.sampler import Sampler
from dimod.response import Response, SampleView
__all__ = ['RandomSampler']
class RandomSampler(Sampler):
"""Gives random samples.
Note that this sampler is intended for testing.
"""
properties = None
parameters = None
def __init__(self):
self.parameters = {'num_reads': []}
self.properties = {}
def sample(self, bqm, num_reads=10):
"""Gives random samples.
Args:
todo
Returns:
:obj:`.Response`: The vartype will match the given binary quadratic model.
Notes:
For each variable in each sample, the value is chosen by a coin flip.
"""
values = np.asarray(list(bqm.vartype.value), dtype='int8')
samples = np.random.choice(values, (num_reads, len(bqm)))
variable_labels = list(bqm.linear)
label_to_idx = {v: idx for idx, v in enumerate(variable_labels)}
energies = [bqm.energy(SampleView(idx, samples, label_to_idx)) for idx in range(num_reads)]
return Response.from_matrix(samples, {'energy': energies},
vartype=bqm.vartype, variable_labels=variable_labels)
|
e84ca44178e984a356c0d77b6ce76040b74dd520
|
bin/upload_version.py
|
bin/upload_version.py
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
# create release
version = sys.argv[1]
filename = sys.argv[2]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filename = sys.argv[2].split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
Use only filename; without path
|
Use only filename; without path
|
Python
|
bsd-2-clause
|
chriszs/redash,alexanderlz/redash,rockwotj/redash,akariv/redash,useabode/redash,pubnative/redash,amino-data/redash,ninneko/redash,pubnative/redash,denisov-vlad/redash,easytaxibr/redash,moritz9/redash,denisov-vlad/redash,pubnative/redash,jmvasquez/redashtest,easytaxibr/redash,easytaxibr/redash,crowdworks/redash,stefanseifert/redash,vishesh92/redash,moritz9/redash,M32Media/redash,M32Media/redash,akariv/redash,moritz9/redash,denisov-vlad/redash,crowdworks/redash,stefanseifert/redash,denisov-vlad/redash,amino-data/redash,jmvasquez/redashtest,stefanseifert/redash,ninneko/redash,akariv/redash,getredash/redash,amino-data/redash,akariv/redash,pubnative/redash,ninneko/redash,akariv/redash,chriszs/redash,M32Media/redash,useabode/redash,hudl/redash,vishesh92/redash,44px/redash,guaguadev/redash,chriszs/redash,jmvasquez/redashtest,getredash/redash,vishesh92/redash,rockwotj/redash,guaguadev/redash,EverlyWell/redash,stefanseifert/redash,getredash/redash,crowdworks/redash,EverlyWell/redash,ninneko/redash,jmvasquez/redashtest,imsally/redash,alexanderlz/redash,hudl/redash,stefanseifert/redash,imsally/redash,moritz9/redash,ninneko/redash,44px/redash,jmvasquez/redashtest,pubnative/redash,imsally/redash,amino-data/redash,alexanderlz/redash,chriszs/redash,easytaxibr/redash,EverlyWell/redash,denisov-vlad/redash,hudl/redash,44px/redash,alexanderlz/redash,vishesh92/redash,crowdworks/redash,guaguadev/redash,getredash/redash,guaguadev/redash,hudl/redash,rockwotj/redash,imsally/redash,getredash/redash,useabode/redash,useabode/redash,EverlyWell/redash,44px/redash,rockwotj/redash,guaguadev/redash,easytaxibr/redash,M32Media/redash
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
# create release
version = sys.argv[1]
filename = sys.argv[2]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
Use only filename; without path
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filename = sys.argv[2].split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
<commit_before>#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
# create release
version = sys.argv[1]
filename = sys.argv[2]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
<commit_msg>Use only filename; without path<commit_after>
|
#!python
"""Create a GitHub release for a version and attach a gzip archive to it.

Usage: upload_version.py <version> <path/to/archive.tar.gz>

Requires GITHUB_TOKEN and CIRCLE_SHA1 in the environment.
"""
import os
import sys
import json
import requests

if __name__ == '__main__':
    version = sys.argv[1]
    filepath = sys.argv[2]
    # GitHub wants only the asset's name; keep the full path for opening
    # the file (the original opened the basename, which fails whenever a
    # path outside the working directory is passed).
    filename = filepath.split('/')[-1]
    github_token = os.environ['GITHUB_TOKEN']
    auth = (github_token, 'x-oauth-basic')
    commit_sha = os.environ['CIRCLE_SHA1']
    params = json.dumps({
        'tag_name': 'v{0}'.format(version),
        'name': 're:dash v{0}'.format(version),
        'target_commitish': commit_sha
    })
    response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
                             data=params,
                             auth=auth)
    # Fail with a clear HTTP error instead of a KeyError on 'upload_url'
    # when release creation is rejected.
    response.raise_for_status()
    upload_url = response.json()['upload_url']
    # Strip the URI-template placeholder; the name goes in query params.
    upload_url = upload_url.replace('{?name}', '')
    # Binary mode is required: the asset is a gzip archive and text mode
    # would attempt (and fail) to decode it on Python 3.
    with open(filepath, 'rb') as file_content:
        headers = {'Content-Type': 'application/gzip'}
        # NOTE(review): verify=False disables TLS certificate checking for
        # the uploads host — confirm whether this workaround is still needed.
        response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
        response.raise_for_status()
|
#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
# create release
version = sys.argv[1]
filename = sys.argv[2]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
Use only filename; without path#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filename = sys.argv[2].split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
<commit_before>#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
# create release
version = sys.argv[1]
filename = sys.argv[2]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
<commit_msg>Use only filename; without path<commit_after>#!python
import os
import sys
import json
import requests
if __name__ == '__main__':
version = sys.argv[1]
filename = sys.argv[2].split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'target_commitish': commit_sha
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filename) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth, headers=headers, verify=False)
|
f28716fba7f3b351b37fdfbb6e6cd1225592da57
|
example/app/templatetags/sqlformat.py
|
example/app/templatetags/sqlformat.py
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
return sqlparse.format(str(sql), reindent=True)
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
'''
Format SQL queries.
'''
return sqlparse.format(str(sql), reindent=True, wrap_after=120)
|
Use less vertical space in query formatting
|
Use less vertical space in query formatting
|
Python
|
bsd-3-clause
|
zostera/django-modeltrans,zostera/django-modeltrans
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
return sqlparse.format(str(sql), reindent=True)
Use less vertical space in query formatting
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
'''
Format SQL queries.
'''
return sqlparse.format(str(sql), reindent=True, wrap_after=120)
|
<commit_before>from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
return sqlparse.format(str(sql), reindent=True)
<commit_msg>Use less vertical space in query formatting<commit_after>
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
    '''
    Template filter: pretty-print a SQL statement (or anything whose
    str() is SQL, such as a Django QuerySet.query).

    Re-indents the statement; wrap_after=120 keeps clauses on one line
    until they exceed 120 characters, saving vertical space.
    '''
    return sqlparse.format(str(sql), reindent=True, wrap_after=120)
|
from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
return sqlparse.format(str(sql), reindent=True)
Use less vertical space in query formattingfrom __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
'''
Format SQL queries.
'''
return sqlparse.format(str(sql), reindent=True, wrap_after=120)
|
<commit_before>from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
return sqlparse.format(str(sql), reindent=True)
<commit_msg>Use less vertical space in query formatting<commit_after>from __future__ import unicode_literals
import sqlparse
from django import template
register = template.Library()
@register.filter
def sqlformat(sql):
'''
Format SQL queries.
'''
return sqlparse.format(str(sql), reindent=True, wrap_after=120)
|
a015eea9dfe94fe10ee44159f92b9ce196c14f2f
|
hooks/settings/settings_gunicorn.py
|
hooks/settings/settings_gunicorn.py
|
# -*- coding: utf-8 -*-
"""
Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 10
# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
if k.startswith("GUNICORN_"):
key = k.split('_', 1)[1].lower()
locals()[key] = v
|
# -*- coding: utf-8 -*-
"""
Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 100
# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
if k.startswith("GUNICORN_"):
key = k.split('_', 1)[1].lower()
locals()[key] = v
|
Increase Gunicorn worker eventlet connections, 10 -> 100.
|
Increase Gunicorn worker eventlet connections, 10 -> 100.
|
Python
|
mit
|
business-factory/captain-hook
|
# -*- coding: utf-8 -*-
"""
Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 10
# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
if k.startswith("GUNICORN_"):
key = k.split('_', 1)[1].lower()
locals()[key] = v
Increase Gunicorn worker eventlet connections, 10 -> 100.
|
# -*- coding: utf-8 -*-
"""
Gunicorn configuration module.

Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
# Defaults; each can be overridden via a GUNICORN_* environment variable.
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 100
# Override Gunicorn params from GUNICORN_* environment variables.
for k, v in os.environ.items():
    if k.startswith("GUNICORN_"):
        # GUNICORN_BIND -> "bind"; at module scope locals() is the module
        # namespace, so this rebinds the default above.
        # NOTE(review): the value stays a string — presumably Gunicorn
        # coerces numeric settings itself; confirm for GUNICORN_WORKERS.
        key = k.split('_', 1)[1].lower()
        locals()[key] = v
|
<commit_before># -*- coding: utf-8 -*-
"""
Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 10
# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
if k.startswith("GUNICORN_"):
key = k.split('_', 1)[1].lower()
locals()[key] = v
<commit_msg>Increase Gunicorn worker eventlet connections, 10 -> 100.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Gunicorn configuration module.

Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
# Defaults; each can be overridden via a GUNICORN_* environment variable.
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 100
# Override Gunicorn params from GUNICORN_* environment variables.
for k, v in os.environ.items():
    if k.startswith("GUNICORN_"):
        # GUNICORN_BIND -> "bind"; at module scope locals() is the module
        # namespace, so this rebinds the default above.
        # NOTE(review): the value stays a string — presumably Gunicorn
        # coerces numeric settings itself; confirm for GUNICORN_WORKERS.
        key = k.split('_', 1)[1].lower()
        locals()[key] = v
|
# -*- coding: utf-8 -*-
"""
Gunicorn configuration module.

Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
# Defaults; each can be overridden via a GUNICORN_* environment variable.
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 10
# Override Gunicorn params from GUNICORN_* environment variables.
for k, v in os.environ.items():
    if k.startswith("GUNICORN_"):
        # GUNICORN_BIND -> "bind"; at module scope locals() is the module
        # namespace, so this rebinds the default above.
        # NOTE(review): the value stays a string — presumably Gunicorn
        # coerces numeric settings itself; confirm for GUNICORN_WORKERS.
        key = k.split('_', 1)[1].lower()
        locals()[key] = v
Increase Gunicorn worker eventlet connections, 10 -> 100.# -*- coding: utf-8 -*-
"""
Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 100
# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
if k.startswith("GUNICORN_"):
key = k.split('_', 1)[1].lower()
locals()[key] = v
|
<commit_before># -*- coding: utf-8 -*-
"""
Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 10
# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
if k.startswith("GUNICORN_"):
key = k.split('_', 1)[1].lower()
locals()[key] = v
<commit_msg>Increase Gunicorn worker eventlet connections, 10 -> 100.<commit_after># -*- coding: utf-8 -*-
"""
Related article: https://sebest.github.io/post/protips-using-gunicorn-inside-a-docker-image/
Parameters you might want to override:
GUNICORN_BIND="0.0.0.0:8005"
"""
import os
workers = 4
bind = "0.0.0.0:8005"
worker_class = "eventlet"
worker_connections = 100
# Overwrite some Gunicorns params by ENV variables
for k, v in os.environ.items():
if k.startswith("GUNICORN_"):
key = k.split('_', 1)[1].lower()
locals()[key] = v
|
d2676e499cb5fbc5864f5f4583a1d476c6406b36
|
extruct/jsonld.py
|
extruct/jsonld.py
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
    """Extracts JSON-LD items from <script type="application/ld+json"> tags."""

    # Matches every JSON-LD <script> element at or below the context node.
    _xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')

    def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
        """Parse *htmlstring* and return the list of JSON-LD items found."""
        tree = parse_html(htmlstring, encoding=encoding)
        return self.extract_items(tree, base_url=base_url)

    def extract_items(self, document, base_url=None):
        """Return every truthy JSON-LD item found anywhere in *document*."""
        collected = []
        for script_node in self._xp_jsonld(document):
            found = self._extract_items(script_node)
            if not found:
                continue
            for entry in found:
                if entry:
                    collected.append(entry)
        return collected

    def _extract_items(self, node):
        """Yield the JSON objects embedded in one <script> node.

        A top-level array yields each element, a single object yields
        itself; any other JSON type is ignored.
        """
        payload = node.xpath('string()')
        try:
            # TODO: `strict=False` can be configurable if needed
            parsed = json.loads(payload, strict=False)
        except ValueError:
            # Some pages prefix the JSON with an HTML or JavaScript
            # comment line; strip one leading comment and retry.
            parsed = json.loads(
                HTML_OR_JS_COMMENTLINE.sub('', payload), strict=False)
        if isinstance(parsed, list):
            for entry in parsed:
                yield entry
        elif isinstance(parsed, dict):
            yield parsed
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
    """Extracts JSON-LD items from <script type="application/ld+json"> tags."""
    # Matches every JSON-LD <script> element at or below the context node.
    _xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
    def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
        """Parse *htmlstring* and return the list of JSON-LD items found."""
        tree = parse_html(htmlstring, encoding=encoding)
        return self.extract_items(tree, base_url=base_url)
    def extract_items(self, document, base_url=None):
        """Return every truthy JSON-LD item found anywhere in *document*."""
        return [
            item
            for items in map(self._extract_items, self._xp_jsonld(document))
            if items for item in items if item
        ]
    def _extract_items(self, node):
        """Yield the JSON objects embedded in one <script> node.

        A top-level array yields each element, a single object yields
        itself; any other JSON type is ignored.
        """
        script = node.xpath('string()')
        try:
            # TODO: `strict=False` can be configurable if needed
            data = json.loads(script, strict=False)
        except ValueError:
            # sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
            data = json.loads(
                HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
        if isinstance(data, list):
            for item in data:
                yield item
        elif isinstance(data, dict):
            yield data
|
Remove an unrelated, unnecessary extra line break
|
Remove an unrelated, unnecessary extra line break
|
Python
|
bsd-3-clause
|
scrapinghub/extruct
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
    """Extracts JSON-LD items from <script type="application/ld+json"> tags."""
    # Matches every JSON-LD <script> element at or below the context node.
    _xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
    def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
        """Parse *htmlstring* and return the list of JSON-LD items found."""
        tree = parse_html(htmlstring, encoding=encoding)
        return self.extract_items(tree, base_url=base_url)
    def extract_items(self, document, base_url=None):
        """Return every truthy JSON-LD item found anywhere in *document*."""
        return [
            item
            for items in map(self._extract_items, self._xp_jsonld(document))
            if items for item in items if item
        ]
    def _extract_items(self, node):
        """Yield the JSON objects embedded in one <script> node.

        A top-level array yields each element, a single object yields
        itself; any other JSON type is ignored.
        """
        script = node.xpath('string()')
        try:
            # TODO: `strict=False` can be configurable if needed
            data = json.loads(script, strict=False)
        except ValueError:
            # sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
            data = json.loads(
                HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
        if isinstance(data, list):
            for item in data:
                yield item
        elif isinstance(data, dict):
            yield data
Remove an unrelated, unnecessary extra line break
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
    """Extracts JSON-LD items from <script type="application/ld+json"> tags."""
    # Matches every JSON-LD <script> element at or below the context node.
    _xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
    def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
        """Parse *htmlstring* and return the list of JSON-LD items found."""
        tree = parse_html(htmlstring, encoding=encoding)
        return self.extract_items(tree, base_url=base_url)
    def extract_items(self, document, base_url=None):
        """Return every truthy JSON-LD item found anywhere in *document*."""
        return [
            item
            for items in map(self._extract_items, self._xp_jsonld(document))
            if items for item in items if item
        ]
    def _extract_items(self, node):
        """Yield the JSON objects embedded in one <script> node.

        A top-level array yields each element, a single object yields
        itself; any other JSON type is ignored.
        """
        script = node.xpath('string()')
        try:
            # TODO: `strict=False` can be configurable if needed
            data = json.loads(script, strict=False)
        except ValueError:
            # sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
            data = json.loads(
                HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
        if isinstance(data, list):
            for item in data:
                yield item
        elif isinstance(data, dict):
            yield data
|
<commit_before># -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
tree = parse_html(htmlstring, encoding=encoding)
return self.extract_items(tree, base_url=base_url)
def extract_items(self, document, base_url=None):
return [
item
for items in map(self._extract_items, self._xp_jsonld(document))
if items for item in items if item
]
def _extract_items(self, node):
script = node.xpath('string()')
try:
# TODO: `strict=False` can be configurable if needed
data = json.loads(script, strict=False)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
data = json.loads(
HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
for item in data:
yield item
elif isinstance(data, dict):
yield data
<commit_msg>Remove an unrelated, unnecessary extra line break<commit_after>
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
    """Extracts JSON-LD items from <script type="application/ld+json"> tags."""
    # Matches every JSON-LD <script> element at or below the context node.
    _xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
    def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
        """Parse *htmlstring* and return the list of JSON-LD items found."""
        tree = parse_html(htmlstring, encoding=encoding)
        return self.extract_items(tree, base_url=base_url)
    def extract_items(self, document, base_url=None):
        """Return every truthy JSON-LD item found anywhere in *document*."""
        return [
            item
            for items in map(self._extract_items, self._xp_jsonld(document))
            if items for item in items if item
        ]
    def _extract_items(self, node):
        """Yield the JSON objects embedded in one <script> node.

        A top-level array yields each element, a single object yields
        itself; any other JSON type is ignored.
        """
        script = node.xpath('string()')
        try:
            # TODO: `strict=False` can be configurable if needed
            data = json.loads(script, strict=False)
        except ValueError:
            # sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
            data = json.loads(
                HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
        if isinstance(data, list):
            for item in data:
                yield item
        elif isinstance(data, dict):
            yield data
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
tree = parse_html(htmlstring, encoding=encoding)
return self.extract_items(tree, base_url=base_url)
def extract_items(self, document, base_url=None):
return [
item
for items in map(self._extract_items, self._xp_jsonld(document))
if items for item in items if item
]
def _extract_items(self, node):
script = node.xpath('string()')
try:
# TODO: `strict=False` can be configurable if needed
data = json.loads(script, strict=False)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
data = json.loads(
HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
for item in data:
yield item
elif isinstance(data, dict):
yield data
Remove an unrelated, unnecessary extra line break# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
tree = parse_html(htmlstring, encoding=encoding)
return self.extract_items(tree, base_url=base_url)
def extract_items(self, document, base_url=None):
return [
item
for items in map(self._extract_items, self._xp_jsonld(document))
if items for item in items if item
]
def _extract_items(self, node):
script = node.xpath('string()')
try:
# TODO: `strict=False` can be configurable if needed
data = json.loads(script, strict=False)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
data = json.loads(
HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
for item in data:
yield item
elif isinstance(data, dict):
yield data
|
<commit_before># -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
tree = parse_html(htmlstring, encoding=encoding)
return self.extract_items(tree, base_url=base_url)
def extract_items(self, document, base_url=None):
return [
item
for items in map(self._extract_items, self._xp_jsonld(document))
if items for item in items if item
]
def _extract_items(self, node):
script = node.xpath('string()')
try:
# TODO: `strict=False` can be configurable if needed
data = json.loads(script, strict=False)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
data = json.loads(
HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
for item in data:
yield item
elif isinstance(data, dict):
yield data
<commit_msg>Remove an unrelated, unnecessary extra line break<commit_after># -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
from extruct.utils import parse_html
HTML_OR_JS_COMMENTLINE = re.compile(r'^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
tree = parse_html(htmlstring, encoding=encoding)
return self.extract_items(tree, base_url=base_url)
def extract_items(self, document, base_url=None):
return [
item
for items in map(self._extract_items, self._xp_jsonld(document))
if items for item in items if item
]
def _extract_items(self, node):
script = node.xpath('string()')
try:
# TODO: `strict=False` can be configurable if needed
data = json.loads(script, strict=False)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
data = json.loads(
HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
for item in data:
yield item
elif isinstance(data, dict):
yield data
|
4ed0272e82c3bdef548643f4b9bce8f6bc510a42
|
classes.py
|
classes.py
|
from collections import namedtuple
class User:
def __init__(self, id_, first_name, last_name='', username=''):
self.id = id_
self.first_name = first_name
self.last_name = last_name
self.username = username
@classmethod
def from_database(cls, user):
return cls(*user)
@classmethod
def from_telegram(cls, user):
copy = user.copy()
copy['id_'] = copy['id']
del copy['id']
return cls(**copy)
class Quote:
def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
content, quoted_by=None):
self.id = id_
self.chat_id = chat_id
self.message_id = message_id
self.sent_at = sent_at
self.sent_by = sent_by
self.content = content
self.quoted_by = quoted_by
@classmethod
def from_database(cls, quote):
return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
|
from collections import namedtuple
class User:
    """A chat user.

    Mirrors the Telegram Bot API ``User`` object; ``language_code`` is
    optional there, so it defaults to an empty string here.
    """

    def __init__(self, id_, first_name, last_name='', username='',
                 language_code=''):
        self.id = id_
        self.first_name = first_name
        self.last_name = last_name
        self.username = username
        # Was accepted but silently discarded; keep it so the
        # Telegram-provided locale is not lost.
        self.language_code = language_code

    @classmethod
    def from_database(cls, user):
        """Build a User from a database row (positional columns)."""
        return cls(*user)

    @classmethod
    def from_telegram(cls, user):
        """Build a User from a Telegram API dict (its 'id' maps to id_)."""
        copy = user.copy()
        copy['id_'] = copy['id']
        del copy['id']
        return cls(**copy)
class Quote:
    """A quoted message plus the metadata needed to locate it again."""

    def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
                 content, quoted_by=None):
        self.id = id_
        # Where the original message lives.
        self.chat_id, self.message_id = chat_id, message_id
        # When and by whom it was sent, and its text.
        self.sent_at, self.sent_by = sent_at, sent_by
        self.content = content
        # Who recorded the quote; None when unknown.
        self.quoted_by = quoted_by

    @classmethod
    def from_database(cls, quote):
        """Build a Quote from a database row (positional columns)."""
        return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
|
Update User class to match Telegram API
|
Update User class to match Telegram API
|
Python
|
mit
|
Doktor/soup-dumpling
|
from collections import namedtuple
class User:
    """A chat user, built either from a database row or a Telegram dict."""

    def __init__(self, id_, first_name, last_name='', username=''):
        self.id = id_
        self.first_name = first_name
        self.last_name = last_name
        self.username = username

    @classmethod
    def from_database(cls, user):
        """Build a User from a database row (positional columns)."""
        return cls(*user)

    @classmethod
    def from_telegram(cls, user):
        """Build a User from a Telegram API dict (its 'id' maps to id_)."""
        payload = dict(user)
        payload['id_'] = payload.pop('id')
        return cls(**payload)
class Quote:
    """A quoted message plus the metadata needed to locate it again."""
    def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
                 content, quoted_by=None):
        self.id = id_
        # Where the original message lives.
        self.chat_id = chat_id
        self.message_id = message_id
        # When and by whom it was sent, and its text.
        self.sent_at = sent_at
        self.sent_by = sent_by
        self.content = content
        # Who recorded the quote; None when unknown.
        self.quoted_by = quoted_by
    @classmethod
    def from_database(cls, quote):
        """Build a Quote from a database row (positional columns)."""
        return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
Update User class to match Telegram API
|
from collections import namedtuple
class User:
def __init__(self, id_, first_name, last_name='', username='',
language_code=''):
self.id = id_
self.first_name = first_name
self.last_name = last_name
self.username = username
@classmethod
def from_database(cls, user):
return cls(*user)
@classmethod
def from_telegram(cls, user):
copy = user.copy()
copy['id_'] = copy['id']
del copy['id']
return cls(**copy)
class Quote:
def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
content, quoted_by=None):
self.id = id_
self.chat_id = chat_id
self.message_id = message_id
self.sent_at = sent_at
self.sent_by = sent_by
self.content = content
self.quoted_by = quoted_by
@classmethod
def from_database(cls, quote):
return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
|
<commit_before>from collections import namedtuple
class User:
def __init__(self, id_, first_name, last_name='', username=''):
self.id = id_
self.first_name = first_name
self.last_name = last_name
self.username = username
@classmethod
def from_database(cls, user):
return cls(*user)
@classmethod
def from_telegram(cls, user):
copy = user.copy()
copy['id_'] = copy['id']
del copy['id']
return cls(**copy)
class Quote:
def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
content, quoted_by=None):
self.id = id_
self.chat_id = chat_id
self.message_id = message_id
self.sent_at = sent_at
self.sent_by = sent_by
self.content = content
self.quoted_by = quoted_by
@classmethod
def from_database(cls, quote):
return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
<commit_msg>Update User class to match Telegram API<commit_after>
|
from collections import namedtuple
class User:
def __init__(self, id_, first_name, last_name='', username='',
language_code=''):
self.id = id_
self.first_name = first_name
self.last_name = last_name
self.username = username
@classmethod
def from_database(cls, user):
return cls(*user)
@classmethod
def from_telegram(cls, user):
copy = user.copy()
copy['id_'] = copy['id']
del copy['id']
return cls(**copy)
class Quote:
def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
content, quoted_by=None):
self.id = id_
self.chat_id = chat_id
self.message_id = message_id
self.sent_at = sent_at
self.sent_by = sent_by
self.content = content
self.quoted_by = quoted_by
@classmethod
def from_database(cls, quote):
return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
|
from collections import namedtuple
class User:
def __init__(self, id_, first_name, last_name='', username=''):
self.id = id_
self.first_name = first_name
self.last_name = last_name
self.username = username
@classmethod
def from_database(cls, user):
return cls(*user)
@classmethod
def from_telegram(cls, user):
copy = user.copy()
copy['id_'] = copy['id']
del copy['id']
return cls(**copy)
class Quote:
def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
content, quoted_by=None):
self.id = id_
self.chat_id = chat_id
self.message_id = message_id
self.sent_at = sent_at
self.sent_by = sent_by
self.content = content
self.quoted_by = quoted_by
@classmethod
def from_database(cls, quote):
return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
Update User class to match Telegram APIfrom collections import namedtuple
class User:
def __init__(self, id_, first_name, last_name='', username='',
language_code=''):
self.id = id_
self.first_name = first_name
self.last_name = last_name
self.username = username
@classmethod
def from_database(cls, user):
return cls(*user)
@classmethod
def from_telegram(cls, user):
copy = user.copy()
copy['id_'] = copy['id']
del copy['id']
return cls(**copy)
class Quote:
def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
content, quoted_by=None):
self.id = id_
self.chat_id = chat_id
self.message_id = message_id
self.sent_at = sent_at
self.sent_by = sent_by
self.content = content
self.quoted_by = quoted_by
@classmethod
def from_database(cls, quote):
return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
|
<commit_before>from collections import namedtuple
class User:
def __init__(self, id_, first_name, last_name='', username=''):
self.id = id_
self.first_name = first_name
self.last_name = last_name
self.username = username
@classmethod
def from_database(cls, user):
return cls(*user)
@classmethod
def from_telegram(cls, user):
copy = user.copy()
copy['id_'] = copy['id']
del copy['id']
return cls(**copy)
class Quote:
def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
content, quoted_by=None):
self.id = id_
self.chat_id = chat_id
self.message_id = message_id
self.sent_at = sent_at
self.sent_by = sent_by
self.content = content
self.quoted_by = quoted_by
@classmethod
def from_database(cls, quote):
return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
<commit_msg>Update User class to match Telegram API<commit_after>from collections import namedtuple
class User:
def __init__(self, id_, first_name, last_name='', username='',
language_code=''):
self.id = id_
self.first_name = first_name
self.last_name = last_name
self.username = username
@classmethod
def from_database(cls, user):
return cls(*user)
@classmethod
def from_telegram(cls, user):
copy = user.copy()
copy['id_'] = copy['id']
del copy['id']
return cls(**copy)
class Quote:
def __init__(self, id_, chat_id, message_id, sent_at, sent_by,
content, quoted_by=None):
self.id = id_
self.chat_id = chat_id
self.message_id = message_id
self.sent_at = sent_at
self.sent_by = sent_by
self.content = content
self.quoted_by = quoted_by
@classmethod
def from_database(cls, quote):
return cls(*quote)
Result = namedtuple('Result', ['quote', 'user'])
|
6392a40e5ec1cc1190f5870f6d7c9cc3710dfd46
|
contrib/fast-import/p4-clean-tags.py
|
contrib/fast-import/p4-clean-tags.py
|
#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
# Python 2 maintenance script: deletes every numeric p4/<rev> tag except
# the one the tip of the chosen branch (--branch, default master)
# resolves to.  Uses the deprecated popen2 module.
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
    opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
    print "fixme, syntax error"
    sys.exit(1)
for o, a in opts:
    if o == "--branch":
        branch = "refs/heads/" + a
# Resolve the branch head to its p4 tag; output looks like
# "<sha> tags/p4/123^0".
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
# NOTE(review): assumes the name-rev output contains a "^" suffix; str.index
# raises ValueError here when a newer git omits it.
caretIdx = output.index("^")
# The revision number sits between "tags/p4/" and the "^".
rev = int(output[tagIdx + 9 : caretIdx])
# Collect all p4/<rev> tags as ints, keep the live one, delete the rest.
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
    allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
    print os.popen("git tag -d p4/%s" % rev).read()
|
#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
try:
caretIdx = output.index("^")
except:
caretIdx = len(output) - 1
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
|
Adjust the output parsing of git name-rev to handle the output of the latest git version.
|
Adjust the output parsing of git name-rev to handle the output of the latest git version.
Signed-off-by: Simon Hausmann <0a3c2b6956b68ea9079d8d9063677b62a646d61b@kde.org>
|
Python
|
mit
|
destenson/git,destenson/git,destenson/git,destenson/git,destenson/git,destenson/git,destenson/git,destenson/git
|
#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
caretIdx = output.index("^")
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
Adjust the output parsing of git name-rev to handle the output of the latest git version.
Signed-off-by: Simon Hausmann <0a3c2b6956b68ea9079d8d9063677b62a646d61b@kde.org>
|
#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
try:
caretIdx = output.index("^")
except:
caretIdx = len(output) - 1
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
|
<commit_before>#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
caretIdx = output.index("^")
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
<commit_msg>Adjust the output parsing of git name-rev to handle the output of the latest git version.
Signed-off-by: Simon Hausmann <0a3c2b6956b68ea9079d8d9063677b62a646d61b@kde.org><commit_after>
|
#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
try:
caretIdx = output.index("^")
except:
caretIdx = len(output) - 1
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
|
#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
caretIdx = output.index("^")
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
Adjust the output parsing of git name-rev to handle the output of the latest git version.
Signed-off-by: Simon Hausmann <0a3c2b6956b68ea9079d8d9063677b62a646d61b@kde.org>#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
try:
caretIdx = output.index("^")
except:
caretIdx = len(output) - 1
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
|
<commit_before>#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
caretIdx = output.index("^")
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
<commit_msg>Adjust the output parsing of git name-rev to handle the output of the latest git version.
Signed-off-by: Simon Hausmann <0a3c2b6956b68ea9079d8d9063677b62a646d61b@kde.org><commit_after>#!/usr/bin/python
#
# p4-debug.py
#
# Author: Simon Hausmann <hausmann@kde.org>
# License: MIT <http://www.opensource.org/licenses/mit-license.php>
#
# removes unused p4 import tags
#
import os, string, sys
import popen2, getopt
branch = "refs/heads/master"
try:
opts, args = getopt.getopt(sys.argv[1:], "", [ "branch=" ])
except getopt.GetoptError:
print "fixme, syntax error"
sys.exit(1)
for o, a in opts:
if o == "--branch":
branch = "refs/heads/" + a
sout, sin, serr = popen2.popen3("git-name-rev --tags `git-rev-parse %s`" % branch)
output = sout.read()
tagIdx = output.index(" tags/p4/")
try:
caretIdx = output.index("^")
except:
caretIdx = len(output) - 1
rev = int(output[tagIdx + 9 : caretIdx])
allTags = os.popen("git tag -l p4/").readlines()
for i in range(len(allTags)):
allTags[i] = int(allTags[i][3:-1])
allTags.sort()
allTags.remove(rev)
for rev in allTags:
print os.popen("git tag -d p4/%s" % rev).read()
|
49fb141da9bf544b73001c1e49ef19e85e88cefb
|
example.py
|
example.py
|
from ui import *
import traceback
################################################################################
# example usage
status = Text('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
|
from ui import *
import traceback
################################################################################
# example usage
status = TextNoWrap('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut ', ' stdErr '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
|
Add extra booger-like properties before transferring
|
Add extra booger-like properties before transferring
|
Python
|
mit
|
thenoviceoof/booger,thenoviceoof/booger
|
from ui import *
import traceback
################################################################################
# example usage
status = Text('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
Add extra booger-like properties before transferring
|
from ui import *
import traceback
################################################################################
# example usage
status = TextNoWrap('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut ', ' stdErr '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
|
<commit_before>from ui import *
import traceback
################################################################################
# example usage
status = Text('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
<commit_msg>Add extra booger-like properties before transferring<commit_after>
|
from ui import *
import traceback
################################################################################
# example usage
status = TextNoWrap('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut ', ' stdErr '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
|
from ui import *
import traceback
################################################################################
# example usage
status = Text('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
Add extra booger-like properties before transferringfrom ui import *
import traceback
################################################################################
# example usage
status = TextNoWrap('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut ', ' stdErr '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
|
<commit_before>from ui import *
import traceback
################################################################################
# example usage
status = Text('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
<commit_msg>Add extra booger-like properties before transferring<commit_after>from ui import *
import traceback
################################################################################
# example usage
status = TextNoWrap('Tests something | ok: 0 | error: 0 | fail: 0', style='RB')
try:
aoeu
except Exception as e:
err = traceback.format_exc()[:-1]
exception = TextNoWrap(err)
def test(i):
test = Box(exception,
title_parts=[' F ', ' a_test%d ' % i],
option_parts=[' Traceback ', ' stdOut ', ' stdErr '])
return test
tests = List(test(1), test(2), test(3), test(4), test(5), test(6), test(7),
test(8), test(9))
# tests = List(*[test(i) for i in range(40)])
# tests = List(test(1), test(2), test(3), test(4), test(5), test(6))
p = VerticalPile(status, tests)
p.current_window = p.windows[-1]
class App(Application):
windows = {'default': p}
def handle(self, key):
if key == 'a':
pass
super(App, self).handle(key)
if __name__ == '__main__':
App().run()
|
6506ece45d123bcf615a636245bc12498b5348de
|
hsdecomp/ptrutil.py
|
hsdecomp/ptrutil.py
|
import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, StaticValue):
return StaticValue(value = pointer.value + offset)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
|
import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
|
Kill obsolete case in pointer_offset
|
Kill obsolete case in pointer_offset
|
Python
|
mit
|
gereeter/hsdecomp
|
import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, StaticValue):
return StaticValue(value = pointer.value + offset)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
Kill obsolete case in pointer_offset
|
import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
|
<commit_before>import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, StaticValue):
return StaticValue(value = pointer.value + offset)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
<commit_msg>Kill obsolete case in pointer_offset<commit_after>
|
import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
|
import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, StaticValue):
return StaticValue(value = pointer.value + offset)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
Kill obsolete case in pointer_offsetimport struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
|
<commit_before>import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, StaticValue):
return StaticValue(value = pointer.value + offset)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
<commit_msg>Kill obsolete case in pointer_offset<commit_after>import struct
from hsdecomp.types import *
def read_half_word(settings, file_offset):
return struct.unpack(settings.rt.halfword.struct, settings.binary[file_offset:file_offset+settings.rt.halfword.size])[0]
def read_word(settings, file_offset):
return struct.unpack(settings.rt.word.struct, settings.binary[file_offset:file_offset+settings.rt.word.size])[0]
def pointer_offset(settings, pointer, offset):
if isinstance(pointer, Tagged):
offset += pointer.tag
assert isinstance(pointer.untagged, Offset)
return Tagged(untagged = Offset(base = pointer.untagged.base, index = pointer.untagged.index + offset // settings.rt.word.size), tag = offset % settings.rt.word.size)
elif isinstance(pointer, UnknownValue):
return UnknownValue()
else:
assert False,"bad pointer to offset"
def dereference(settings, parsed, pointer, stack):
if isinstance(pointer, Offset):
if isinstance(pointer.base, HeapPointer):
return parsed['heaps'][pointer.base.heap_segment][pointer.index]
elif isinstance(pointer.base, StackPointer):
return stack[pointer.index]
elif isinstance(pointer, StaticValue):
assert pointer.value % settings.rt.word.size == 0
return Tagged(StaticValue(value = read_word(settings, settings.data_offset + pointer.value)), tag = 0)
|
d901af0d908053d11db556c7755dbce32e9d1311
|
importlib_resources/tests/test_contents.py
|
importlib_resources/tests/test_contents.py
|
import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
@property
def contents(self):
return sorted(
[el for el in list(resources.contents(self.data)) if el != '__pycache__']
)
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
def test_contents(self):
self.assertEqual(
self.contents,
[
'binary.file',
'utf-16.file',
'utf-8.file',
],
)
|
import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
expected = {
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
}
def test_contents(self):
assert self.expected <= set(resources.contents(self.data))
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
pass
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
expected = {
# no __init__ because of namespace design
# no subdirectory as incidental difference in fixture
'binary.file',
'utf-16.file',
'utf-8.file',
}
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
|
Consolidate some behavior and re-use 'set' comparison for less strict unordered comparisons.
|
Consolidate some behavior and re-use 'set' comparison for less strict unordered comparisons.
|
Python
|
apache-2.0
|
python/importlib_resources
|
import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
@property
def contents(self):
return sorted(
[el for el in list(resources.contents(self.data)) if el != '__pycache__']
)
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
def test_contents(self):
self.assertEqual(
self.contents,
[
'binary.file',
'utf-16.file',
'utf-8.file',
],
)
Consolidate some behavior and re-use 'set' comparison for less strict unordered comparisons.
|
import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
expected = {
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
}
def test_contents(self):
assert self.expected <= set(resources.contents(self.data))
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
pass
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
expected = {
# no __init__ because of namespace design
# no subdirectory as incidental difference in fixture
'binary.file',
'utf-16.file',
'utf-8.file',
}
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
|
<commit_before>import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
@property
def contents(self):
return sorted(
[el for el in list(resources.contents(self.data)) if el != '__pycache__']
)
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
def test_contents(self):
self.assertEqual(
self.contents,
[
'binary.file',
'utf-16.file',
'utf-8.file',
],
)
<commit_msg>Consolidate some behavior and re-use 'set' comparison for less strict unordered comparisons.<commit_after>
|
import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
expected = {
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
}
def test_contents(self):
assert self.expected <= set(resources.contents(self.data))
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
pass
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
expected = {
# no __init__ because of namespace design
# no subdirectory as incidental difference in fixture
'binary.file',
'utf-16.file',
'utf-8.file',
}
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
|
import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
@property
def contents(self):
return sorted(
[el for el in list(resources.contents(self.data)) if el != '__pycache__']
)
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
def test_contents(self):
self.assertEqual(
self.contents,
[
'binary.file',
'utf-16.file',
'utf-8.file',
],
)
Consolidate some behavior and re-use 'set' comparison for less strict unordered comparisons.import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
expected = {
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
}
def test_contents(self):
assert self.expected <= set(resources.contents(self.data))
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
pass
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
expected = {
# no __init__ because of namespace design
# no subdirectory as incidental difference in fixture
'binary.file',
'utf-16.file',
'utf-8.file',
}
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
|
<commit_before>import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
@property
def contents(self):
return sorted(
[el for el in list(resources.contents(self.data)) if el != '__pycache__']
)
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
def test_contents(self):
self.assertEqual(
self.contents,
[
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
],
)
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
def test_contents(self):
self.assertEqual(
self.contents,
[
'binary.file',
'utf-16.file',
'utf-8.file',
],
)
<commit_msg>Consolidate some behavior and re-use 'set' comparison for less strict unordered comparisons.<commit_after>import unittest
import importlib_resources as resources
from . import data01
from . import util
class ContentsTests:
expected = {
'__init__.py',
'binary.file',
'subdirectory',
'utf-16.file',
'utf-8.file',
}
def test_contents(self):
assert self.expected <= set(resources.contents(self.data))
class ContentsDiskTests(ContentsTests, unittest.TestCase):
def setUp(self):
self.data = data01
class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase):
pass
class ContentsNamespaceTests(ContentsTests, unittest.TestCase):
expected = {
# no __init__ because of namespace design
# no subdirectory as incidental difference in fixture
'binary.file',
'utf-16.file',
'utf-8.file',
}
def setUp(self):
from . import namespacedata01
self.data = namespacedata01
|
eab9a90c41d140d2029edba44065a2380ddf1f11
|
syncplay/__init__.py
|
syncplay/__init__.py
|
version = '1.3.0'
milestone = 'Chami'
release_number = '3'
projectURL = 'http://syncplay.pl/'
|
version = '1.3.0'
milestone = 'Chami'
release_number = '4'
projectURL = 'http://syncplay.pl/'
|
Increase release number to 4 (1.3.0 Beta 3)
|
Increase release number to 4 (1.3.0 Beta 3)
|
Python
|
apache-2.0
|
NeverDecaf/syncplay,Syncplay/syncplay,alby128/syncplay,NeverDecaf/syncplay,alby128/syncplay,Syncplay/syncplay
|
version = '1.3.0'
milestone = 'Chami'
release_number = '3'
projectURL = 'http://syncplay.pl/'
Increase release number to 4 (1.3.0 Beta 3)
|
version = '1.3.0'
milestone = 'Chami'
release_number = '4'
projectURL = 'http://syncplay.pl/'
|
<commit_before>version = '1.3.0'
milestone = 'Chami'
release_number = '3'
projectURL = 'http://syncplay.pl/'
<commit_msg>Increase release number to 4 (1.3.0 Beta 3)<commit_after>
|
version = '1.3.0'
milestone = 'Chami'
release_number = '4'
projectURL = 'http://syncplay.pl/'
|
version = '1.3.0'
milestone = 'Chami'
release_number = '3'
projectURL = 'http://syncplay.pl/'
Increase release number to 4 (1.3.0 Beta 3)version = '1.3.0'
milestone = 'Chami'
release_number = '4'
projectURL = 'http://syncplay.pl/'
|
<commit_before>version = '1.3.0'
milestone = 'Chami'
release_number = '3'
projectURL = 'http://syncplay.pl/'
<commit_msg>Increase release number to 4 (1.3.0 Beta 3)<commit_after>version = '1.3.0'
milestone = 'Chami'
release_number = '4'
projectURL = 'http://syncplay.pl/'
|
54b619f75fd07b9843054e1077e908981d5fa89e
|
crm_compassion/tests/__init__.py
|
crm_compassion/tests/__init__.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]
|
Fix pep8 in init file
|
Fix pep8 in init file
|
Python
|
agpl-3.0
|
ecino/compassion-modules,CompassionCH/compassion-modules,philippe89/compassion-modules,MickSandoz/compassion-modules,eicher31/compassion-modules,ecino/compassion-modules,Secheron/compassion-modules,CompassionCH/compassion-modules,MickSandoz/compassion-modules,Secheron/compassion-modules,MickSandoz/compassion-modules,ndtran/compassion-modules,ndtran/compassion-modules,ecino/compassion-modules,ndtran/compassion-modules,eicher31/compassion-modules,ecino/compassion-modules,maxime-beck/compassion-modules,maxime-beck/compassion-modules,CompassionCH/compassion-modules,ecino/compassion-modules,philippe89/compassion-modules,philippe89/compassion-modules,emgirardin/compassion-modules,maxime-beck/compassion-modules,Secheron/compassion-modules,eicher31/compassion-modules,emgirardin/compassion-modules,maxime-beck/compassion-modules,CompassionCH/compassion-modules,eicher31/compassion-modules,emgirardin/compassion-modules,eicher31/compassion-modules
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]Fix pep8 in init file
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]<commit_msg>Fix pep8 in init file<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]Fix pep8 in init file# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]<commit_msg>Fix pep8 in init file<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <albert.shenouda@efrei.net>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from . import test_crm_compassion
checks = [
test_crm_compassion
]
|
7c12e3d3fc4e09658b0ad7dc7a1fbb80e6ec80b8
|
generate_table.py
|
generate_table.py
|
import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
|
import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"id": "apple",
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
|
Make sure to add the apple id
|
Make sure to add the apple id
|
Python
|
mit
|
kyleconroy/apple-stock
|
import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
Make sure to add the apple id
|
import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"id": "apple",
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
|
<commit_before>import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
<commit_msg>Make sure to add the apple id<commit_after>
|
import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"id": "apple",
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
|
import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
Make sure to add the apple idimport json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"id": "apple",
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
|
<commit_before>import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
<commit_msg>Make sure to add the apple id<commit_after>import json
import xml.etree.ElementTree as etree
table = etree.Element("table", attrib={
"id": "apple",
"class": "tablesorter",
"cellspacing": "1",
"cellpadding": "0",
})
thead = etree.SubElement(table, "thead")
tbody = etree.SubElement(table, "tbody")
tr = etree.SubElement(thead, "tr")
for heading in ["Product", "Release Date", "Original Price", "Stock Value Today"]:
th = etree.SubElement(tr, "th")
th.text = heading
for product in json.load(open("apple-specs-with-current.json")):
row = etree.SubElement(tbody, "tr")
value = 0
data = [
product["name"],
product["introduction-date"].split("T")[0],
"$" + unicode(product["original-price"]),
"${:d}".format(int(product["stock-shares"] * 599.55)),
]
for field in data:
td = etree.SubElement(row, "td")
td.text = field
print etree.tostring(table)
|
21c82cbf238b0a87f8aeebf713b7218a282ae280
|
pyconca2017/pycon_sponsors/templatetags/presenters.py
|
pyconca2017/pycon_sponsors/templatetags/presenters.py
|
from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en_US/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
|
from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
|
Fix a bug in the template tag.
|
:bug: Fix a bug in the template tag.
|
Python
|
mit
|
pyconca/2017-web,pyconca/2017-web,pyconca/2017-web,pyconca/2017-web
|
from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en_US/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
:bug: Fix a bug in the template tag.
|
from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
|
<commit_before>from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en_US/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
<commit_msg>:bug: Fix a bug in the template tag.<commit_after>
|
from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
|
from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en_US/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
:bug: Fix a bug in the template tag.from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
|
<commit_before>from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en_US/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
<commit_msg>:bug: Fix a bug in the template tag.<commit_after>from django.template.loader_tags import register
from django.template import loader, Context, defaultfilters, TemplateDoesNotExist
import markdown
presenters = {
'Speaker': 'presenters/speaker_presenter.html'
}
generic_template = 'presenters/object_presenter.html'
@register.simple_tag(takes_context=True)
def present(context, obj):
model_name = type(obj).__name__
template_name = presenters.get(model_name, generic_template)
t = loader.get_template(template_name)
return t.render(Context({
'model_name': model_name,
'obj': obj,
}))
@register.filter
def noval(data, placeholder):
if data:
return data
return placeholder
@register.simple_tag(takes_context=True)
def include_md(context, template_name):
lang = context['LANGUAGE_CODE'].replace('-', '_')
try:
t = loader.render_to_string('markdown/{}/{}'.format(lang, template_name), context)
except TemplateDoesNotExist:
t = loader.render_to_string('markdown/en/{}'.format(template_name), context)
html = markdown.markdown(t)
return defaultfilters.safe(html)
|
26bbb0b8cca1d44548c96726d4b4e8296a482d12
|
ircstat/defaults.py
|
ircstat/defaults.py
|
# Copyright 2013 John Reese
# Licensed under the MIT license
filename_regex = r'(?:[a-z]+_)#(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
channel_regex_group = 1
date_regex_group = 2
date_format = r'%Y%m%d'
|
# Copyright 2013 John Reese
# Licensed under the MIT license
# the regex to parse data from irc log filenames.
# must contain two named matching groups:
# channel: the name of the channel
# date: the date of the conversation
filename_regex = r'#?(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
# the format of the date content in the matched filename.
# must follow python's datetime.strptime() format, as defined at
# http://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
filename_date_format = r'%Y%m%d'
|
Clean up and document default config values
|
Clean up and document default config values
|
Python
|
mit
|
jreese/ircstat,jreese/ircstat
|
# Copyright 2013 John Reese
# Licensed under the MIT license
filename_regex = r'(?:[a-z]+_)#(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
channel_regex_group = 1
date_regex_group = 2
date_format = r'%Y%m%d'
Clean up and document default config values
|
# Copyright 2013 John Reese
# Licensed under the MIT license
# the regex to parse data from irc log filenames.
# must contain two named matching groups:
# channel: the name of the channel
# date: the date of the conversation
filename_regex = r'#?(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
# the format of the date content in the matched filename.
# must follow python's datetime.strptime() format, as defined at
# http://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
filename_date_format = r'%Y%m%d'
|
<commit_before># Copyright 2013 John Reese
# Licensed under the MIT license
filename_regex = r'(?:[a-z]+_)#(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
channel_regex_group = 1
date_regex_group = 2
date_format = r'%Y%m%d'
<commit_msg>Clean up and document default config values<commit_after>
|
# Copyright 2013 John Reese
# Licensed under the MIT license
# the regex to parse data from irc log filenames.
# must contain two named matching groups:
# channel: the name of the channel
# date: the date of the conversation
filename_regex = r'#?(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
# the format of the date content in the matched filename.
# must follow python's datetime.strptime() format, as defined at
# http://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
filename_date_format = r'%Y%m%d'
|
# Copyright 2013 John Reese
# Licensed under the MIT license
filename_regex = r'(?:[a-z]+_)#(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
channel_regex_group = 1
date_regex_group = 2
date_format = r'%Y%m%d'
Clean up and document default config values# Copyright 2013 John Reese
# Licensed under the MIT license
# the regex to parse data from irc log filenames.
# must contain two named matching groups:
# channel: the name of the channel
# date: the date of the conversation
filename_regex = r'#?(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
# the format of the date content in the matched filename.
# must follow python's datetime.strptime() format, as defined at
# http://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
filename_date_format = r'%Y%m%d'
|
<commit_before># Copyright 2013 John Reese
# Licensed under the MIT license
filename_regex = r'(?:[a-z]+_)#(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
channel_regex_group = 1
date_regex_group = 2
date_format = r'%Y%m%d'
<commit_msg>Clean up and document default config values<commit_after># Copyright 2013 John Reese
# Licensed under the MIT license
# the regex to parse data from irc log filenames.
# must contain two named matching groups:
# channel: the name of the channel
# date: the date of the conversation
filename_regex = r'#?(?P<channel>[a-z]+)_(?P<date>\d{8}).log'
# the format of the date content in the matched filename.
# must follow python's datetime.strptime() format, as defined at
# http://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
filename_date_format = r'%Y%m%d'
|
9116828db256ecb1a1e303e31049e526ab9ae8eb
|
mailqueue/urls.py
|
mailqueue/urls.py
|
from django.conf.urls import patterns, url
urlpatterns = patterns('mailqueue.views',
url(r'^clear$', 'clear_sent_messages', name='clear_sent_messages'),
url(r'^$', 'run_mail_job', name='run_mail_job'),
)
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^clear$', views.clear_sent_messages, name='clear_sent_messages'),
url(r'^$', views.run_mail_job, name='run_mail_job'),
]
|
Remove warning "deprecated" in url.py
|
Remove warning "deprecated" in url.py
version django=1.9.6
RemovedInDjango110Warning: django.conf.urls.patterns() is deprecated and will be removed in Django 1.10. Update your urlpatterns to be a list of django.conf.urls.url() instances instead.
|
Python
|
mit
|
dstegelman/django-mail-queue,Goury/django-mail-queue,Goury/django-mail-queue,dstegelman/django-mail-queue
|
from django.conf.urls import patterns, url
urlpatterns = patterns('mailqueue.views',
url(r'^clear$', 'clear_sent_messages', name='clear_sent_messages'),
url(r'^$', 'run_mail_job', name='run_mail_job'),
)
Remove warning "deprecated" in url.py
version django=1.9.6
RemovedInDjango110Warning: django.conf.urls.patterns() is deprecated and will be removed in Django 1.10. Update your urlpatterns to be a list of django.conf.urls.url() instances instead.
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^clear$', views.clear_sent_messages, name='clear_sent_messages'),
url(r'^$', views.run_mail_job, name='run_mail_job'),
]
|
<commit_before>from django.conf.urls import patterns, url
urlpatterns = patterns('mailqueue.views',
url(r'^clear$', 'clear_sent_messages', name='clear_sent_messages'),
url(r'^$', 'run_mail_job', name='run_mail_job'),
)
<commit_msg>Remove warning "deprecated" in url.py
version django=1.9.6
RemovedInDjango110Warning: django.conf.urls.patterns() is deprecated and will be removed in Django 1.10. Update your urlpatterns to be a list of django.conf.urls.url() instances instead.<commit_after>
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^clear$', views.clear_sent_messages, name='clear_sent_messages'),
url(r'^$', views.run_mail_job, name='run_mail_job'),
]
|
from django.conf.urls import patterns, url
urlpatterns = patterns('mailqueue.views',
url(r'^clear$', 'clear_sent_messages', name='clear_sent_messages'),
url(r'^$', 'run_mail_job', name='run_mail_job'),
)
Remove warning "deprecated" in url.py
version django=1.9.6
RemovedInDjango110Warning: django.conf.urls.patterns() is deprecated and will be removed in Django 1.10. Update your urlpatterns to be a list of django.conf.urls.url() instances instead.from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^clear$', views.clear_sent_messages, name='clear_sent_messages'),
url(r'^$', views.run_mail_job, name='run_mail_job'),
]
|
<commit_before>from django.conf.urls import patterns, url
urlpatterns = patterns('mailqueue.views',
url(r'^clear$', 'clear_sent_messages', name='clear_sent_messages'),
url(r'^$', 'run_mail_job', name='run_mail_job'),
)
<commit_msg>Remove warning "deprecated" in url.py
version django=1.9.6
RemovedInDjango110Warning: django.conf.urls.patterns() is deprecated and will be removed in Django 1.10. Update your urlpatterns to be a list of django.conf.urls.url() instances instead.<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^clear$', views.clear_sent_messages, name='clear_sent_messages'),
url(r'^$', views.run_mail_job, name='run_mail_job'),
]
|
05fc49863d202b2e12f8ac822c40bab4618aeff1
|
moocng/peerreview/managers.py
|
moocng/peerreview/managers.py
|
# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(kq__unit__course=course)
|
# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by('kq__unit__order')
|
Fix the order of assignments
|
Fix the order of assignments
|
Python
|
apache-2.0
|
OpenMOOC/moocng,GeographicaGS/moocng,GeographicaGS/moocng,GeographicaGS/moocng,OpenMOOC/moocng,GeographicaGS/moocng
|
# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(kq__unit__course=course)
Fix the order of assignments
|
# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by('kq__unit__order')
|
<commit_before># Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(kq__unit__course=course)
<commit_msg>Fix the order of assignments<commit_after>
|
# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by('kq__unit__order')
|
# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(kq__unit__course=course)
Fix the order of assignments# Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by('kq__unit__order')
|
<commit_before># Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(kq__unit__course=course)
<commit_msg>Fix the order of assignments<commit_after># Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by('kq__unit__order')
|
68c768634503d359fac23869e20931f0b39897dc
|
fulfil_client/contrib/mocking.py
|
fulfil_client/contrib/mocking.py
|
# -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
|
# -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
context = {}
subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
self.context = {}
|
Add subdomain and context to mock
|
Add subdomain and context to mock
|
Python
|
isc
|
sharoonthomas/fulfil-python-api,fulfilio/fulfil-python-api
|
# -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
Add subdomain and context to mock
|
# -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
context = {}
subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
self.context = {}
|
<commit_before># -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
<commit_msg>Add subdomain and context to mock<commit_after>
|
# -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
context = {}
subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
self.context = {}
|
# -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
Add subdomain and context to mock# -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
context = {}
subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
self.context = {}
|
<commit_before># -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
<commit_msg>Add subdomain and context to mock<commit_after># -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
context = {}
subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
self.context = {}
|
3a0b5bd923eff1fb143aa73fc54f735e7b330068
|
examples/plot_sphere_function.py
|
examples/plot_sphere_function.py
|
#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function",
output_file_name="sphere_3d.png")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function",
output_file_name="sphere.png")
plt.tight_layout()
plt.show()
|
#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function")
plt.tight_layout()
plt.show()
|
Switch off the output file generation.
|
Switch off the output file generation.
|
Python
|
mit
|
jeremiedecock/pyai,jeremiedecock/pyai
|
#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function",
output_file_name="sphere_3d.png")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function",
output_file_name="sphere.png")
plt.tight_layout()
plt.show()
Switch off the output file generation.
|
#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function")
plt.tight_layout()
plt.show()
|
<commit_before>#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function",
output_file_name="sphere_3d.png")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function",
output_file_name="sphere.png")
plt.tight_layout()
plt.show()
<commit_msg>Switch off the output file generation.<commit_after>
|
#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function")
plt.tight_layout()
plt.show()
|
#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function",
output_file_name="sphere_3d.png")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function",
output_file_name="sphere.png")
plt.tight_layout()
plt.show()
Switch off the output file generation.#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function")
plt.tight_layout()
plt.show()
|
<commit_before>#!/usr/bin/env python3
# coding: utf-8
"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================
This example show how to plot the *Sphere function*.
"""
###############################################################################
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere
###############################################################################
# Plot the sphere function
plot_2d_solution_space(sphere,
xmin=-2*np.ones(2),
xmax=2*np.ones(2),
xstar=np.zeros(2),
angle_view=(55, 83),
title="Sphere function",
output_file_name="sphere_3d.png")
plt.tight_layout()
plt.show()
###############################################################################
# Plot the contours
plot_2d_contour_solution_space(sphere,
xmin=-10*np.ones(2),
xmax=10*np.ones(2),
xstar=np.zeros(2),
title="Sphere function",
output_file_name="sphere.png")
plt.tight_layout()
plt.show()
<commit_msg>Switch off the output file generation.<commit_after>#!/usr/bin/env python3
# coding: utf-8

"""
================================================
Optimization Benchmark: Plot the Sphere Function
================================================

This example shows how to plot the *Sphere function*.
"""

###############################################################################
# Import required packages

import numpy as np
import matplotlib.pyplot as plt

from ailib.utils.plot import plot_2d_contour_solution_space, plot_2d_solution_space
from ailib.optimize.functions.unconstrained import sphere

###############################################################################
# Plot the sphere function as a 3D surface over [-2, 2]^2.
# xstar marks the known global minimum at the origin; angle_view sets the
# (elevation, azimuth) of the 3D camera.

plot_2d_solution_space(sphere,
                       xmin=-2*np.ones(2),
                       xmax=2*np.ones(2),
                       xstar=np.zeros(2),
                       angle_view=(55, 83),
                       title="Sphere function")

plt.tight_layout()
plt.show()

###############################################################################
# Plot the contours of the same function over the wider range [-10, 10]^2.

plot_2d_contour_solution_space(sphere,
                               xmin=-10*np.ones(2),
                               xmax=10*np.ones(2),
                               xstar=np.zeros(2),
                               title="Sphere function")

plt.tight_layout()
plt.show()
04fcda42222fff1daad780db53190bcfb721d034
|
polling_stations/apps/data_collection/management/commands/import_mid_sussex.py
|
polling_stations/apps/data_collection/management/commands/import_mid_sussex.py
|
"""
Imports Mid Sussex
"""
import sys
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.statnum,
'postcode': record.postcode,
'address': address,
'location': location
}
|
"""
Imports Mid Sussex
"""
import sys
from lxml import etree
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def extract_msercode_from_description(self, description):
html = etree.HTML(str(description).replace('&', '&'))
rows = html.xpath("//td")
return rows[7].text
def district_record_to_dict(self, record):
msercode = self.extract_msercode_from_description(record['description'])
geojson = self.strip_z_values(record.geom.geojson)
poly = self.clean_poly(GEOSGeometry(geojson, srid=self.get_srid('districts')))
return {
'internal_council_id': msercode,
'name' : record['Name'].value,
'area' : poly
}
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.msercode,
'postcode': record.postcode,
'address': address,
'location': location,
'polling_district_id': record.msercode
}
|
Fix Mid Sussex Import script
|
Fix Mid Sussex Import script
Set polling_district_id
Use mserid as internal_council_id to avoid importing duplicate points
|
Python
|
bsd-3-clause
|
chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations
|
"""
Imports Mid Sussex
"""
import sys
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.statnum,
'postcode': record.postcode,
'address': address,
'location': location
}
Fix Mid Sussex Import script
Set polling_district_id
Use mserid as internal_council_id to avoid importing duplicate points
|
"""
Imports Mid Sussex
"""
import sys
from lxml import etree
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
    """Imports the Polling Station data from Mid Sussex.

    Districts come from a KMZ (zipped KML) file and stations from a CSV;
    the two are joined on the district's MSER code.
    """

    council_id = 'E07000228'
    districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
    stations_name = 'R3900_pollingstations.csv'

    def extract_msercode_from_description(self, description):
        """Pull the MSER code out of the KML <description> HTML table.

        Bare ampersands in the source KML are not valid markup, so they
        are escaped to entities before handing the fragment to lxml.
        """
        # BUG FIX: replace('&', '&') was a no-op; bare ampersands must be
        # escaped as entities or lxml may mis-parse the fragment.
        html = etree.HTML(str(description).replace('&', '&amp;'))
        rows = html.xpath("//td")
        # The MSER code sits in the eighth <td> cell of the table
        # (index 7) -- TODO confirm against a sample KML record.
        return rows[7].text

    def district_record_to_dict(self, record):
        """Map a KML district record to the importer's district dict."""
        msercode = self.extract_msercode_from_description(record['description'])
        # Strip Z coordinates and clean the polygon before storing it.
        geojson = self.strip_z_values(record.geom.geojson)
        poly = self.clean_poly(GEOSGeometry(geojson, srid=self.get_srid('districts')))
        return {
            'internal_council_id': msercode,
            'name': record['Name'].value,
            'area': poly,
        }

    def station_record_to_dict(self, record):
        """Map a CSV station row to the importer's station dict."""
        location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
        address = "\n".join([record.venue, record.street, record.town])
        return {
            # Use the MSER code (not statnum) so re-imports deduplicate,
            # and link the station to its polling district by the same key.
            'internal_council_id': record.msercode,
            'postcode': record.postcode,
            'address': address,
            'location': location,
            'polling_district_id': record.msercode,
        }
|
<commit_before>"""
Imports Mid Sussex
"""
import sys
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.statnum,
'postcode': record.postcode,
'address': address,
'location': location
}
<commit_msg>Fix Mid Sussex Import script
Set polling_district_id
Use mserid as internal_council_id to avoid importing duplicate points<commit_after>
|
"""
Imports Mid Sussex
"""
import sys
from lxml import etree
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def extract_msercode_from_description(self, description):
html = etree.HTML(str(description).replace('&', '&'))
rows = html.xpath("//td")
return rows[7].text
def district_record_to_dict(self, record):
msercode = self.extract_msercode_from_description(record['description'])
geojson = self.strip_z_values(record.geom.geojson)
poly = self.clean_poly(GEOSGeometry(geojson, srid=self.get_srid('districts')))
return {
'internal_council_id': msercode,
'name' : record['Name'].value,
'area' : poly
}
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.msercode,
'postcode': record.postcode,
'address': address,
'location': location,
'polling_district_id': record.msercode
}
|
"""
Imports Mid Sussex
"""
import sys
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.statnum,
'postcode': record.postcode,
'address': address,
'location': location
}
Fix Mid Sussex Import script
Set polling_district_id
Use mserid as internal_council_id to avoid importing duplicate points"""
Imports Mid Sussex
"""
import sys
from lxml import etree
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def extract_msercode_from_description(self, description):
html = etree.HTML(str(description).replace('&', '&'))
rows = html.xpath("//td")
return rows[7].text
def district_record_to_dict(self, record):
msercode = self.extract_msercode_from_description(record['description'])
geojson = self.strip_z_values(record.geom.geojson)
poly = self.clean_poly(GEOSGeometry(geojson, srid=self.get_srid('districts')))
return {
'internal_council_id': msercode,
'name' : record['Name'].value,
'area' : poly
}
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.msercode,
'postcode': record.postcode,
'address': address,
'location': location,
'polling_district_id': record.msercode
}
|
<commit_before>"""
Imports Mid Sussex
"""
import sys
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.statnum,
'postcode': record.postcode,
'address': address,
'location': location
}
<commit_msg>Fix Mid Sussex Import script
Set polling_district_id
Use mserid as internal_council_id to avoid importing duplicate points<commit_after>"""
Imports Mid Sussex
"""
import sys
from lxml import etree
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Mid Sussex
"""
council_id = 'E07000228'
districts_name = 'msdc_3830_pollingdistricts_polygon.kmz'
stations_name = 'R3900_pollingstations.csv'
def extract_msercode_from_description(self, description):
html = etree.HTML(str(description).replace('&', '&'))
rows = html.xpath("//td")
return rows[7].text
def district_record_to_dict(self, record):
msercode = self.extract_msercode_from_description(record['description'])
geojson = self.strip_z_values(record.geom.geojson)
poly = self.clean_poly(GEOSGeometry(geojson, srid=self.get_srid('districts')))
return {
'internal_council_id': msercode,
'name' : record['Name'].value,
'area' : poly
}
def station_record_to_dict(self, record):
location = Point(float(record.xcoord), float(record.ycoord), srid=self.srid)
address = "\n".join([record.venue, record.street, record.town])
return {
'internal_council_id': record.msercode,
'postcode': record.postcode,
'address': address,
'location': location,
'polling_district_id': record.msercode
}
|
1c3a9f3a19fe2f4296775ae508b92e4208a9687a
|
content/test/gpu/gpu_tests/pixel_expectations.py
|
content/test/gpu/gpu_tests/pixel_expectations.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
self.Fail('Pixel.CSS3DBlueBox', bug=416450)
pass
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
pass
|
Remove failing expectations for the pixel tests.
|
Remove failing expectations for the pixel tests.
The expectations were only in temporarily to allow a rebaseline.
BUG=416450
Review URL: https://codereview.chromium.org/591213002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#296061}
|
Python
|
bsd-3-clause
|
M4sse/chromium.src,M4sse/chromium.src,jaruba/chromium.src,dednal/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,Chilledheart/chromium,dushu1203/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,ltilve/chromium,Jonekee/chromium.src,fujunwei/chromium-crosswalk,M4sse/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,Jon
ekee/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,M4sse/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Chilledheart/chromium,dednal/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,a
xinging/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,chuan9/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,markYoungH/chromium.src,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,ltilve/chromium,jaruba/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,jaruba/chromium.src
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
self.Fail('Pixel.CSS3DBlueBox', bug=416450)
pass
Remove failing expectations for the pixel tests.
The expectations were only in temporarily to allow a rebaseline.
BUG=416450
Review URL: https://codereview.chromium.org/591213002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#296061}
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
pass
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
self.Fail('Pixel.CSS3DBlueBox', bug=416450)
pass
<commit_msg>Remove failing expectations for the pixel tests.
The expectations were only in temporarily to allow a rebaseline.
BUG=416450
Review URL: https://codereview.chromium.org/591213002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#296061}<commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
  """Expectations (known failures / flaky marks) for the GPU pixel tests.

  Currently empty: every pixel test is expected to pass. See the header
  comments above for the valid OS / GPU-vendor condition tags.
  """

  def SetExpectations(self):
    # Sample Usage:
    # self.Fail('Pixel.Canvas2DRedBox',
    #     ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
    pass
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
self.Fail('Pixel.CSS3DBlueBox', bug=416450)
pass
Remove failing expectations for the pixel tests.
The expectations were only in temporarily to allow a rebaseline.
BUG=416450
Review URL: https://codereview.chromium.org/591213002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#296061}# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
pass
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
self.Fail('Pixel.CSS3DBlueBox', bug=416450)
pass
<commit_msg>Remove failing expectations for the pixel tests.
The expectations were only in temporarily to allow a rebaseline.
BUG=416450
Review URL: https://codereview.chromium.org/591213002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#296061}<commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
pass
|
8225a74078901bf90ae60ace9f5902bbfc1b5526
|
cla_public/config/docker.py
|
cla_public/config/docker.py
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['handlers']['console']['formatter'] = 'logstash'
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
|
Enable logstash formatter for console logs
|
Enable logstash formatter for console logs
|
Python
|
mit
|
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
Enable logstash formatter for console logs
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['handlers']['console']['formatter'] = 'logstash'
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
|
<commit_before>from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
<commit_msg>Enable logstash formatter for console logs<commit_after>
|
from cla_public.config.common import *

# Docker deployment settings: every value below is driven by environment
# variables injected into the container.

# Debug mode only when SET_DEBUG is the literal string 'True'.
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'

# Required -- deliberately raises KeyError at startup if unset.
SECRET_KEY = os.environ['SECRET_KEY']

# Only mark the session cookie Secure (HTTPS-only) in prod-like environments.
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']

# Prefer an explicit HOST_NAME, falling back to the container's hostname.
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')

# Required -- base URI of the CLA backend API.
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']

LAALAA_API_HOST = os.environ.get(
    'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')

# Emit console logs in logstash format so they can be shipped/aggregated.
LOGGING['handlers']['console']['formatter'] = 'logstash'

LOGGING['loggers'] = {
    '': {
        'handlers': ['console'],
        'level': os.environ.get('LOG_LEVEL', 'INFO')
    }
}
|
from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
Enable logstash formatter for console logsfrom cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['handlers']['console']['formatter'] = 'logstash'
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
|
<commit_before>from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
<commit_msg>Enable logstash formatter for console logs<commit_after>from cla_public.config.common import *
DEBUG = os.environ.get('SET_DEBUG', False) == 'True'
SECRET_KEY = os.environ['SECRET_KEY']
SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging']
HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME')
BACKEND_BASE_URI = os.environ['BACKEND_BASE_URI']
LAALAA_API_HOST = os.environ.get(
'LAALAA_API_HOST', 'https://prod.laalaa.dsd.io')
LOGGING['handlers']['console']['formatter'] = 'logstash'
LOGGING['loggers'] = {
'': {
'handlers': ['console'],
'level': os.environ.get('LOG_LEVEL', 'INFO')
}
}
|
d36e624acb349b3fd78bb3fb91ba0bcc696719c2
|
imagekit/utils.py
|
imagekit/utils.py
|
import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
if img.mode == 'P':
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
|
import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
transparency_formats = ('PNG', 'GIF', )
if img.mode == 'P' and format in transparency_formats:
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img = img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
|
Fix conversion of PNG "palette" or "P" mode images to JPEG. "P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.
|
Fix conversion of PNG "palette" or "P" mode images to JPEG.
"P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.
|
Python
|
bsd-3-clause
|
pcompassion/django-imagekit,tawanda/django-imagekit,tawanda/django-imagekit,FundedByMe/django-imagekit,pcompassion/django-imagekit,pcompassion/django-imagekit,FundedByMe/django-imagekit
|
import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
if img.mode == 'P':
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
Fix conversion of PNG "palette" or "P" mode images to JPEG.
"P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.
|
import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
transparency_formats = ('PNG', 'GIF', )
if img.mode == 'P' and format in transparency_formats:
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img = img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
|
<commit_before>import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
if img.mode == 'P':
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
<commit_msg>Fix conversion of PNG "palette" or "P" mode images to JPEG.
"P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.<commit_after>
|
import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
    """Save *img* in *format* to a temporary file and return that file
    object, rewound to the beginning and ready for reading.

    Extra keyword arguments are forwarded to ``img.save``.
    """
    fobj = tempfile.TemporaryFile()
    # Palette ("P") images keep their transparency only in formats that
    # support it; for anything else, flatten to RGB before saving.
    if img.mode == 'P' and format in ('PNG', 'GIF'):
        kwargs['transparency'] = len(img.split()[-1].getcolors())
    else:
        img = img.convert('RGB')
    img.save(fobj, format, **kwargs)
    fobj.seek(0)
    return fobj
def get_spec_files(instance):
    """Return the spec-file attributes registered on *instance*.

    Instances without ImageKit bookkeeping (no ``_ik`` attribute)
    yield an empty list.
    """
    if not hasattr(instance, '_ik'):
        return []
    ik = instance._ik
    return [getattr(instance, name) for name in ik.spec_file_names]
def open_image(target):
    """Open *target* with PIL and patch its ``copy`` method so that
    extra metadata attributes survive copying -- see ``_wrap_copy``.
    """
    img = Image.open(target)
    # Rebind copy() on this one instance.  NOTE(review): the
    # three-argument types.MethodType(func, instance, class) form is
    # Python 2 only; Python 3 takes (func, instance).
    img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
    return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
|
import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
if img.mode == 'P':
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
Fix conversion of PNG "palette" or "P" mode images to JPEG.
"P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
transparency_formats = ('PNG', 'GIF', )
if img.mode == 'P' and format in transparency_formats:
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img = img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
|
<commit_before>import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
if img.mode == 'P':
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
<commit_msg>Fix conversion of PNG "palette" or "P" mode images to JPEG.
"P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.<commit_after>import tempfile
import types
from django.utils.functional import wraps
from imagekit.lib import Image
def img_to_fobj(img, format, **kwargs):
tmp = tempfile.TemporaryFile()
# Preserve transparency if the image is in Pallette (P) mode.
transparency_formats = ('PNG', 'GIF', )
if img.mode == 'P' and format in transparency_formats:
kwargs['transparency'] = len(img.split()[-1].getcolors())
else:
img = img.convert('RGB')
img.save(tmp, format, **kwargs)
tmp.seek(0)
return tmp
def get_spec_files(instance):
try:
ik = getattr(instance, '_ik')
except AttributeError:
return []
else:
return [getattr(instance, n) for n in ik.spec_file_names]
def open_image(target):
img = Image.open(target)
img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__)
return img
def _wrap_copy(f):
@wraps(f)
def copy(self):
img = f()
try:
img.app = self.app
except AttributeError:
pass
try:
img._getexif = self._getexif
except AttributeError:
pass
return img
return copy
|
04ca2afaa43cc4de88020235a7e1bf4d4377c5bc
|
pearbot.py
|
pearbot.py
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "%(asctime)-15s %(name)-10s %(levelname)-8s %(message)s"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "{asctime} [{process}] {levelname}({name}): {message}"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT, style="{")
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
|
Change log format (added PID)
|
Change log format (added PID)
|
Python
|
mit
|
pyrige/pump19
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "%(asctime)-15s %(name)-10s %(levelname)-8s %(message)s"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
Change log format (added PID)
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "{asctime} [{process}] {levelname}({name}): {message}"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT, style="{")
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "%(asctime)-15s %(name)-10s %(levelname)-8s %(message)s"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
<commit_msg>Change log format (added PID)<commit_after>
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "{asctime} [{process}] {levelname}({name}): {message}"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT, style="{")
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "%(asctime)-15s %(name)-10s %(levelname)-8s %(message)s"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
Change log format (added PID)#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "{asctime} [{process}] {levelname}({name}): {message}"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT, style="{")
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
LOG_FORMAT = "%(asctime)-15s %(name)-10s %(levelname)-8s %(message)s"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
def main():
logger = logging.getLogger("pearbot")
logger.info("PearBot started.")
loop = asyncio.get_event_loop()
client_config = config.get_config("irc")
client = protocol.Protocol(**client_config)
cmdhdl_config = config.get_config("cmd")
# we don't need to remember this instance
command.CommandHandler(client, **cmdhdl_config)
def shutdown():
logger.info("Shutdown signal received.")
client.shutdown()
loop.add_signal_handler(signal.SIGTERM, shutdown)
logger.info("Running protocol activity.")
task = client.run()
loop.run_until_complete(task)
logger.info("Protocol activity ceased.")
logger.info("Exiting...")
if __name__ == "__main__":
main()
<commit_msg>Change log format (added PID)<commit_after>#!/usr/bin/env python3
# vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
"""
pearbot.py
The PearBot IRC bot entry point.
It sets up logging and starts up the IRC client.
Copyright (c) 2014 Twisted Pear <pear at twistedpear dot at>
See the file LICENSE for copying permission.
"""
import asyncio
import command
import config
import logging
import protocol
import signal
# Log line: timestamp [pid] LEVEL(logger): message.  Uses str.format
# placeholders, hence style="{" in basicConfig below.
LOG_FORMAT = "{asctime} [{process}] {levelname}({name}): {message}"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT, style="{")


def main():
    """Wire up the IRC client and run it until shutdown."""
    logger = logging.getLogger("pearbot")
    logger.info("PearBot started.")

    loop = asyncio.get_event_loop()

    # Build the IRC protocol client from the "irc" config section.
    client_config = config.get_config("irc")
    client = protocol.Protocol(**client_config)

    # The handler hooks itself into the client; we don't need to keep
    # a reference to the instance.
    cmdhdl_config = config.get_config("cmd")
    command.CommandHandler(client, **cmdhdl_config)

    def shutdown():
        # SIGTERM handler: ask the client to stop; presumably this
        # makes the run() task below complete -- confirm in protocol.
        logger.info("Shutdown signal received.")
        client.shutdown()

    loop.add_signal_handler(signal.SIGTERM, shutdown)

    logger.info("Running protocol activity.")
    task = client.run()
    loop.run_until_complete(task)
    logger.info("Protocol activity ceased.")

    logger.info("Exiting...")


if __name__ == "__main__":
    main()
|
b96ac3debb472dcf3aaac84f43309a4d01a27159
|
exam/tests/test_dynamic_import.py
|
exam/tests/test_dynamic_import.py
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
specific_exam = create_specific_exam('Biopsy')
specific_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
specific_exam = create_specific_exam('Necropsy')
specific_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
biopsy_exam = create_specific_exam('Biopsy')
biopsy_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
necropsy_exam = create_specific_exam('Necropsy')
necropsy_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
|
Update variables names in exam tests
|
Update variables names in exam tests
|
Python
|
mit
|
msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
specific_exam = create_specific_exam('Biopsy')
specific_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
specific_exam = create_specific_exam('Necropsy')
specific_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
Update variables names in exam tests
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
biopsy_exam = create_specific_exam('Biopsy')
biopsy_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
necropsy_exam = create_specific_exam('Necropsy')
necropsy_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
|
<commit_before># -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
specific_exam = create_specific_exam('Biopsy')
specific_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
specific_exam = create_specific_exam('Necropsy')
specific_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
<commit_msg>Update variables names in exam tests<commit_after>
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
biopsy_exam = create_specific_exam('Biopsy')
biopsy_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
necropsy_exam = create_specific_exam('Necropsy')
necropsy_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
|
# -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
specific_exam = create_specific_exam('Biopsy')
specific_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
specific_exam = create_specific_exam('Necropsy')
specific_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
Update variables names in exam tests# -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
biopsy_exam = create_specific_exam('Biopsy')
biopsy_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
necropsy_exam = create_specific_exam('Necropsy')
necropsy_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
|
<commit_before># -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
specific_exam = create_specific_exam('Biopsy')
specific_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
specific_exam = create_specific_exam('Necropsy')
specific_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
<commit_msg>Update variables names in exam tests<commit_after># -*- coding: utf-8 -*-
from django.test import TestCase
from should_dsl import should
from exam.dynamic_import import create_specific_exam
from core.tests import FormatTest
from sys import stderr
class TestDynamicImport(FormatTest, TestCase):
def setUp(self):
self.my_type = '[Exam - Dynamic Import]'
stderr.write(self.__str__())
def test_create_biopsy_exam(self):
from biopsy.models import Biopsy
biopsy_exam = create_specific_exam('Biopsy')
biopsy_exam | should | be_kind_of(Biopsy)
def test_create_necropsy_exam(self):
from necropsy.models import Necropsy
necropsy_exam = create_specific_exam('Necropsy')
necropsy_exam | should | be_kind_of(Necropsy)
def test_create_specific_exam_invalid_param(self):
from exam.exceptions import InvalidParameter
InvalidParameter | should | be_thrown_by(lambda: create_specific_exam(
''))
def test_create_specific_exam_invalid_model(self):
from exam.exceptions import ModelDoesNotExist
ModelDoesNotExist | should | be_thrown_by(lambda: create_specific_exam(
'InvalidModel'))
|
353fe0141267a8e50992f564bd991eba096a3fca
|
zforce.py
|
zforce.py
|
import zipfile
def bf_extract(zfile, password):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
pass
finally:
zip.close()
if __name__ == "__main__":
bf_extract("spmv.zip", "ok")
|
import zipfile
import optparse
class InvalidZip(Exception):
    """Raised when the given path does not point to a valid zip archive."""

    def __init__(self, value):
        # Keep the offending value around for later inspection.
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
def bf_extract(zfile, password):
    """Attempt to extract *zfile* into the current directory using *password*.

    Returns:
        True when extraction succeeds, False when the archive rejects the
        password (or extraction otherwise fails).

    Raises:
        InvalidZip: if *zfile* is not a zip archive at all.
    """
    if not zipfile.is_zipfile(zfile):
        raise InvalidZip("invalid zip file: " + zfile)
    try:
        # Context manager guarantees the archive is closed on every path.
        with zipfile.ZipFile(zfile) as archive:
            archive.setpassword(password)
            archive.extractall()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; a failed extraction just reports False.
        return False
    return True
def find_password(list_file, zip_file):
    """Try each line of *list_file* as the password for *zip_file*.

    Stops at the first password that extracts successfully, at the first
    empty line, or as soon as the archive turns out not to be a zip file.
    Returns silently if the wordlist cannot be opened.
    """
    try:
        # NOTE(review): the file handle is never closed -- consider `with`.
        file = open(list_file)
        while True:
            line = file.readline()
            # Strip the trailing newline.  NOTE(review): this also eats the
            # last character of a final line that lacks a newline -- confirm
            # wordlists always end with a newline.
            line = line[:-1]
            if not line:
                # Empty line doubles as end-of-file sentinel.
                break
            try:
                if bf_extract(zip_file, line):
                    print "The password is " + line
                    break
            except InvalidZip:
                # Not a zip archive at all: no point trying more passwords.
                break
    except IOError:
        # Wordlist missing/unreadable: give up quietly.
        return
def main():
    """Parse command-line options and start the wordlist attack.

    Expects ``-l`` (path to a wordlist, one candidate password per line)
    and ``-f`` (path to the zip file to crack).
    """
    p = optparse.OptionParser()
    # Fixed the copy-pasted "Person is required" help strings, which
    # described neither option.
    p.add_option('-l', help="wordlist file, one candidate password per line",
                 dest="list_file")
    p.add_option('-f', help="zip file to extract",
                 dest="zip_file")
    options, arguments = p.parse_args()
    find_password(options.list_file, options.zip_file)
|
Decompress a zip given a list file
|
Decompress a zip given a list file
|
Python
|
apache-2.0
|
alexst07/ZipBruteforce
|
import zipfile
def bf_extract(zfile, password):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
pass
finally:
zip.close()
if __name__ == "__main__":
bf_extract("spmv.zip", "ok")Decompress a zip given a list file
|
import zipfile
import optparse
class InvalidZip(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def bf_extract(zfile, password):
res = True
if (zipfile.is_zipfile(zfile)):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
res = False
finally:
zip.close()
else:
raise InvalidZip("invalid zip file: " + zfile)
if res:
return True
else:
return False
def find_password(list_file, zip_file):
try:
file = open(list_file)
while True:
line = file.readline()
line = line[:-1]
if not line:
break
try:
if bf_extract(zip_file, line):
print "The password is " + line
break
except InvalidZip:
break
except IOError:
return
def main():
p = optparse.OptionParser()
p.add_option('-l', help="Person is required",
dest="list_file")
p.add_option('-f', help="Person is required",
dest="zip_file")
options, arguments = p.parse_args()
find_password(options.list_file, options.zip_file)
if __name__ == "__main__":
main()
|
<commit_before>import zipfile
def bf_extract(zfile, password):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
pass
finally:
zip.close()
if __name__ == "__main__":
bf_extract("spmv.zip", "ok")<commit_msg>Decompress a zip given a list file<commit_after>
|
import zipfile
import optparse
class InvalidZip(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def bf_extract(zfile, password):
res = True
if (zipfile.is_zipfile(zfile)):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
res = False
finally:
zip.close()
else:
raise InvalidZip("invalid zip file: " + zfile)
if res:
return True
else:
return False
def find_password(list_file, zip_file):
try:
file = open(list_file)
while True:
line = file.readline()
line = line[:-1]
if not line:
break
try:
if bf_extract(zip_file, line):
print "The password is " + line
break
except InvalidZip:
break
except IOError:
return
def main():
p = optparse.OptionParser()
p.add_option('-l', help="Person is required",
dest="list_file")
p.add_option('-f', help="Person is required",
dest="zip_file")
options, arguments = p.parse_args()
find_password(options.list_file, options.zip_file)
if __name__ == "__main__":
main()
|
import zipfile
def bf_extract(zfile, password):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
pass
finally:
zip.close()
if __name__ == "__main__":
bf_extract("spmv.zip", "ok")Decompress a zip given a list fileimport zipfile
import optparse
class InvalidZip(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def bf_extract(zfile, password):
res = True
if (zipfile.is_zipfile(zfile)):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
res = False
finally:
zip.close()
else:
raise InvalidZip("invalid zip file: " + zfile)
if res:
return True
else:
return False
def find_password(list_file, zip_file):
try:
file = open(list_file)
while True:
line = file.readline()
line = line[:-1]
if not line:
break
try:
if bf_extract(zip_file, line):
print "The password is " + line
break
except InvalidZip:
break
except IOError:
return
def main():
p = optparse.OptionParser()
p.add_option('-l', help="Person is required",
dest="list_file")
p.add_option('-f', help="Person is required",
dest="zip_file")
options, arguments = p.parse_args()
find_password(options.list_file, options.zip_file)
if __name__ == "__main__":
main()
|
<commit_before>import zipfile
def bf_extract(zfile, password):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
pass
finally:
zip.close()
if __name__ == "__main__":
bf_extract("spmv.zip", "ok")<commit_msg>Decompress a zip given a list file<commit_after>import zipfile
import optparse
class InvalidZip(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def bf_extract(zfile, password):
res = True
if (zipfile.is_zipfile(zfile)):
zip = zipfile.ZipFile(zfile)
try:
zip.setpassword(password)
zip.extractall()
except:
res = False
finally:
zip.close()
else:
raise InvalidZip("invalid zip file: " + zfile)
if res:
return True
else:
return False
def find_password(list_file, zip_file):
try:
file = open(list_file)
while True:
line = file.readline()
line = line[:-1]
if not line:
break
try:
if bf_extract(zip_file, line):
print "The password is " + line
break
except InvalidZip:
break
except IOError:
return
def main():
p = optparse.OptionParser()
p.add_option('-l', help="Person is required",
dest="list_file")
p.add_option('-f', help="Person is required",
dest="zip_file")
options, arguments = p.parse_args()
find_password(options.list_file, options.zip_file)
if __name__ == "__main__":
main()
|
1f958dc4439fbe435b1d0381d15860708f1f9745
|
constance/__init__.py
|
constance/__init__.py
|
from .base import Config
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = Config()
else:
default_app_config = 'constance.apps.ConstanceConfig'
|
from .base import Config
from django.utils.functional import SimpleLazyObject
# Package version string.
__version__ = '1.0a1'
# Django >= 1.7 ships the app-config machinery; there, setup is deferred to
# constance.apps.ConstanceConfig.  On older Djangos build the config object
# here -- wrapped in SimpleLazyObject so merely importing this package has
# no side effects until the config is first accessed.
try:
    from django.apps import AppConfig  # noqa
except ImportError:
    config = SimpleLazyObject(Config)
else:
    default_app_config = 'constance.apps.ConstanceConfig'
|
Make the config object lazy for old Djangos.
|
Make the config object lazy for old Djangos.
This should prevent import time side effects from instantiating the config object directly there.
|
Python
|
bsd-3-clause
|
gmflanagan/waterboy,vinnyrose/django-constance,jerzyk/django-constance,metalpriest/django-constance,thorgate/django-constance,jmerdich/django-constance,jazzband/django-constance,jonzlin95/django-constance,winzard/django-constance,metalpriest/django-constance,jazzband/django-constance,django-leonardo/django-constance,pombredanne/django-constance,APSL/django-constance,michaelkuty/django-constance,dmugtasimov/django-constance,thorgate/django-constance,Andrey86/django-constance,askabelin/django-constance,pombredanne/django-constance,jezdez/django-constance,Andrey86/django-constance,michaelkuty/django-constance,APSL/django-constance,jezdez/django-constance,jerzyk/django-constance,askabelin/django-constance,django-leonardo/django-constance,jmerdich/django-constance,jonzlin95/django-constance,jazzband/django-constance,dmugtasimov/django-constance,winzard/django-constance,vinnyrose/django-constance
|
from .base import Config
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = Config()
else:
default_app_config = 'constance.apps.ConstanceConfig'
Make the config object lazy for old Djangos.
This should prevent import time side effects from instantiating the config object directly there.
|
from .base import Config
from django.utils.functional import SimpleLazyObject
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = SimpleLazyObject(Config)
else:
default_app_config = 'constance.apps.ConstanceConfig'
|
<commit_before>from .base import Config
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = Config()
else:
default_app_config = 'constance.apps.ConstanceConfig'
<commit_msg>Make the config object lazy for old Djangos.
This should prevent import time side effects from instantiating the config object directly there.<commit_after>
|
from .base import Config
from django.utils.functional import SimpleLazyObject
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = SimpleLazyObject(Config)
else:
default_app_config = 'constance.apps.ConstanceConfig'
|
from .base import Config
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = Config()
else:
default_app_config = 'constance.apps.ConstanceConfig'
Make the config object lazy for old Djangos.
This should prevent import time side effects from instantiating the config object directly there.from .base import Config
from django.utils.functional import SimpleLazyObject
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = SimpleLazyObject(Config)
else:
default_app_config = 'constance.apps.ConstanceConfig'
|
<commit_before>from .base import Config
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = Config()
else:
default_app_config = 'constance.apps.ConstanceConfig'
<commit_msg>Make the config object lazy for old Djangos.
This should prevent import time side effects from instantiating the config object directly there.<commit_after>from .base import Config
from django.utils.functional import SimpleLazyObject
__version__ = '1.0a1'
try:
from django.apps import AppConfig # noqa
except ImportError:
config = SimpleLazyObject(Config)
else:
default_app_config = 'constance.apps.ConstanceConfig'
|
55983918f76066662496a82d321ac482c1668492
|
profile_bs_xf03id/startup/50-scans.py
|
profile_bs_xf03id/startup/50-scans.py
|
# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
|
# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
# Shared bluesky global state; the detector list and table columns set
# below apply to all scans started from this session.
gs = get_gs()
hxntools.scans.setup()
# Short aliases for the HXN scan wrappers used interactively.
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
# Detectors triggered at every scan point.  NOTE(review): the device objects
# (zebra, sclr1, merlin1, xspress3, ...) are assumed to be defined by earlier
# IPython-profile startup files -- confirm the startup load order.
gs.DETS = [zebra, sclr1, merlin1, xspress3, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
                 't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
|
Add xspress3 to step-scan detector list
|
Add xspress3 to step-scan detector list
|
Python
|
bsd-2-clause
|
NSLS-II-HXN/ipython_ophyd,NSLS-II-HXN/ipython_ophyd
|
# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
Add xspress3 to step-scan detector list
|
# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, xspress3, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
|
<commit_before># vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
<commit_msg>Add xspress3 to step-scan detector list<commit_after>
|
# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, xspress3, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
|
# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
Add xspress3 to step-scan detector list# vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, xspress3, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
|
<commit_before># vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
<commit_msg>Add xspress3 to step-scan detector list<commit_after># vim: sw=4 ts=4 sts expandtab smarttab
# HXN step-scan configuration
import hxntools.scans
from bluesky.global_state import get_gs
gs = get_gs()
hxntools.scans.setup()
ct = hxntools.scans.count
ascan = hxntools.scans.absolute_scan
dscan = hxntools.scans.relative_scan
fermat = hxntools.scans.relative_fermat
spiral = hxntools.scans.relative_spiral
mesh = hxntools.scans.absolute_mesh
dmesh = hxntools.scans.relative_mesh
gs.DETS = [zebra, sclr1, merlin1, xspress3, smll, lakeshore2, xbpm, dcm, s1]
gs.TABLE_COLS = ['sclr2_ch2','sclr2_ch3', 'sclr2_ch4', 'ssx', 'ssy', 'ssz',
't_base', 't_sample', 't_vlens', 't_hlens']
# Plot this by default versus motor position:
gs.PLOT_Y = 'Det2_V'
|
1cb79216f992ea0f31abb28031a74f6e703582cb
|
YouKnowShit/DownloadPic.py
|
YouKnowShit/DownloadPic.py
|
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
|
import requests
import bs4
import os
import urllib.request
import shutil  # NOTE(review): unused in this script -- confirm before removing
import re      # NOTE(review): unused in this script -- confirm before removing
# Search endpoint that returns a page containing the jacket image for an ID.
base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
# Directory whose file names (minus extension) are used as search keywords;
# downloaded pictures are saved next to the source files.
srcDir = 'H:\\temp'
# id attribute of the <img> element holding the jacket picture.
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
    # Keyword is the file name up to the first dot.
    # NOTE(review): raises IndexError below if a name starts with '.' -- confirm
    # the directory never contains dot-files.
    preFileName = filename.split(".")[0]
    # Multi-part releases end in A/B/C; strip the suffix so every part
    # shares a single picture file.
    if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
        preFileName = preFileName[0:len(preFileName) - 1]
    destPicName = srcDir + os.sep + preFileName + '.jpg'
    if (os.path.isfile(destPicName)):
        # Picture already downloaded on an earlier run (or for part A).
        print(destPicName + ' already here.\n')
    else:
        full_url = base_url + preFileName
        response = requests.get(full_url)
        soup = bs4.BeautifulSoup(response.text, "html.parser")
        try:
            # Fails (TypeError) when the page has no element with that id.
            imgsrc = soup.find(id = filterWord)['src']
            print(preFileName + "\n" + imgsrc)
            print(destPicName + "\n")
            if not (os.path.isfile(destPicName)):
                urllib.request.urlretrieve(imgsrc, destPicName)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt;
            # consider narrowing to (TypeError, KeyError, urllib.error.URLError).
            print('Can not find picture of ' + filename + '\n')
|
Update the pic download base url.
|
Update the pic download base url.
|
Python
|
mit
|
jiangtianyu2009/PiSoftCake
|
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')Update the pic download base url.
|
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
|
<commit_before>import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')<commit_msg>Update the pic download base url.<commit_after>
|
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
|
import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')Update the pic download base url.import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
|
<commit_before>import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')<commit_msg>Update the pic download base url.<commit_after>import requests
import bs4
import os
import urllib.request
import shutil
import re
base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"
filenames = os.listdir(srcDir)
for filename in filenames:
preFileName = filename.split(".")[0]
if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
preFileName = preFileName[0:len(preFileName) - 1]
destPicName = srcDir + os.sep + preFileName + '.jpg'
if (os.path.isfile(destPicName)):
print(destPicName + ' already here.\n')
else:
full_url = base_url + preFileName
response = requests.get(full_url)
soup = bs4.BeautifulSoup(response.text, "html.parser")
try:
imgsrc = soup.find(id = filterWord)['src']
print(preFileName + "\n" + imgsrc)
print(destPicName + "\n")
if not (os.path.isfile(destPicName)):
urllib.request.urlretrieve(imgsrc, destPicName)
except:
print('Can not find picture of ' + filename + '\n')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.