| Column | Type | Length / classes |
|---|---|---|
| commit | string | lengths 40-40 |
| old_file | string | lengths 4-118 |
| new_file | string | lengths 4-118 |
| old_contents | string | lengths 0-2.94k |
| new_contents | string | lengths 1-4.43k |
| subject | string | lengths 15-444 |
| message | string | lengths 16-3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | lengths 5-43.2k |
| prompt | string | lengths 17-4.58k |
| response | string | lengths 1-4.43k |
| prompt_tagged | string | lengths 58-4.62k |
| response_tagged | string | lengths 1-4.43k |
| text | string | lengths 132-7.29k |
| text_tagged | string | lengths 173-7.33k |
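The columns above describe a commit-editing dataset: per-record commit metadata plus raw and tagged prompt/response text. A minimal sketch of loading and inspecting a dataset with this schema via the Hugging Face `datasets` library follows; the path `"your-org/your-dataset"` is a placeholder, not the real repository name.

```python
# Minimal sketch: load a dataset with the columns listed above and inspect one record.
# "your-org/your-dataset" is a placeholder path, not the actual repository name.
from datasets import load_dataset

ds = load_dataset("your-org/your-dataset", split="train")

record = ds[0]
print(record["commit"])               # 40-character commit hash
print(record["old_file"])             # path of the edited file
print(record["subject"])              # first line of the commit message
print(record["prompt_tagged"][:200])  # tagged prompt text, truncated for display
```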
commit: ec35b758d76bd2501f407819451306a20800f874
old_file: setup.py
new_file: setup.py
old_contents:
from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="An API for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
new_contents:
from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="A Python library for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
subject: Prepare to release on PyPI
message: Prepare to release on PyPI
[ci skip]
lang: Python
license: bsd-3-clause
repos: CybOXProject/python-cybox
prompt:
from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="An API for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
Prepare to release on PyPI
[ci skip]
response:
from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="A Python library for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
prompt_tagged:
<commit_before>from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="An API for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
<commit_msg>Prepare to release on PyPI
[ci skip]<commit_after>
response_tagged:
from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="A Python library for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
text:
from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="An API for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
Prepare to release on PyPI
[ci skip]from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="A Python library for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
text_tagged:
<commit_before>from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="An API for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
<commit_msg>Prepare to release on PyPI
[ci skip]<commit_after>from setuptools import setup, find_packages
setup(
name="cybox",
version="1.0.0b1",
author="CybOX Project, MITRE Corporation",
author_email="cybox@mitre.org",
description="A Python library for parsing and generating CybOX content.",
url="http://cybox.mitre.org",
packages=find_packages(),
install_requires=['lxml>=2.3', 'python-dateutil'],
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
]
)
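Judging from this first record, the tagged fields appear to be plain concatenations of the raw columns around the `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers, while `prompt` and `text` appear to be the same concatenations without markers. A minimal sketch of that assembly is below; `build_tagged_text` and `build_tagged_prompt` are illustrative helper names, not part of any released tooling.

```python
# Sketch of how the tagged fields in the record above appear to be assembled.
# These helpers are illustrative, not official dataset utilities.
def build_tagged_text(old_contents: str, message: str, new_contents: str) -> str:
    # text_tagged: full before/message/after concatenation with markers.
    return (
        "<commit_before>" + old_contents
        + "<commit_msg>" + message
        + "<commit_after>" + new_contents
    )

def build_tagged_prompt(old_contents: str, message: str) -> str:
    # prompt_tagged ends at the <commit_after> marker; the response holds new_contents.
    return "<commit_before>" + old_contents + "<commit_msg>" + message + "<commit_after>"
```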

commit: 9382715f276e91e9ad08de1aef1c1be5cc434359
old_file: setup.py
new_file: setup.py
old_contents:
from setuptools import setup, find_packages
setup(
name="taxii-client",
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
new_contents:
import os
from setuptools import setup, find_packages
def here(*path):
return os.path.join(os.path.dirname(__file__), *path)
with open(here('README.rst')) as fp:
long_description = fp.read()
setup(
name="taxii-client",
description="Client for interacting with TAXII servers",
long_description=long_description,
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
subject: Include description and long description (README) in package metadata
message: Include description and long description (README) in package metadata
lang: Python
license: bsd-3-clause
repos: Intelworks/cabby
prompt:
from setuptools import setup, find_packages
setup(
name="taxii-client",
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
Include description and long description (README) in package metadata
response:
import os
from setuptools import setup, find_packages
def here(*path):
return os.path.join(os.path.dirname(__file__), *path)
with open(here('README.rst')) as fp:
long_description = fp.read()
setup(
name="taxii-client",
description="Client for interacting with TAXII servers",
long_description=long_description,
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
prompt_tagged:
<commit_before>from setuptools import setup, find_packages
setup(
name="taxii-client",
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
<commit_msg>Include description and long description (README) in package metadata<commit_after>
response_tagged:
import os
from setuptools import setup, find_packages
def here(*path):
return os.path.join(os.path.dirname(__file__), *path)
with open(here('README.rst')) as fp:
long_description = fp.read()
setup(
name="taxii-client",
description="Client for interacting with TAXII servers",
long_description=long_description,
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
text:
from setuptools import setup, find_packages
setup(
name="taxii-client",
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
Include description and long description (README) in package metadataimport os
from setuptools import setup, find_packages
def here(*path):
return os.path.join(os.path.dirname(__file__), *path)
with open(here('README.rst')) as fp:
long_description = fp.read()
setup(
name="taxii-client",
description="Client for interacting with TAXII servers",
long_description=long_description,
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
text_tagged:
<commit_before>from setuptools import setup, find_packages
setup(
name="taxii-client",
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)
<commit_msg>Include description and long description (README) in package metadata<commit_after>import os
from setuptools import setup, find_packages
def here(*path):
return os.path.join(os.path.dirname(__file__), *path)
with open(here('README.rst')) as fp:
long_description = fp.read()
setup(
name="taxii-client",
description="Client for interacting with TAXII servers",
long_description=long_description,
version="0.0.2",
url="https://github.com/Intelworks/taxii-client/",
author="Intelworks",
author_email="development@intelworks.com",
packages=find_packages(),
scripts=[
'bin/taxii-collections',
'bin/taxii-discovery',
'bin/taxii-poll',
'bin/taxii-push',
],
install_requires=[
'libtaxii==1.1.105-SNAPSHOT',
'pytz',
'colorlog',
],
)

commit: 361fa65dbbec06bd7147a01329b0c783e69824be
old_file: setup.py
new_file: setup.py
old_contents:
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
new_contents:
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'jump consistent hash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
subject: Add 'jump consistent hash' keyword.
message: Add 'jump consistent hash' keyword.
lang: Python
license: mit
repos: renstrom/python-jump-consistent-hash,renstrom/python-jump-consistent-hash,renstrom/python-jump-consistent-hash
prompt:
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
Add 'jump consistent hash' keyword.
response:
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'jump consistent hash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
prompt_tagged:
<commit_before>from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
<commit_msg>Add 'jump consistent hash' keyword.<commit_after>
response_tagged:
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'jump consistent hash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
text:
from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
Add 'jump consistent hash' keyword.from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'jump consistent hash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
text_tagged:
<commit_before>from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
<commit_msg>Add 'jump consistent hash' keyword.<commit_after>from __future__ import print_function
import sys
from setuptools import setup
if sys.version_info < (3, 2):
print('ERROR: jump-consistent-hash requires Python version 3.2 or newer.',
file=sys.stderr)
sys.exit(1)
setup(name='jump_consistent_hash',
version='1.0.3',
description='Implementation of the Jump Consistent Hash algorithm',
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/python-jump-consistent-hash',
packages=['jump'],
test_suite='tests',
keywords=[
'jump hash',
'jumphash',
'jump consistent hash',
'consistent hash',
'hash algorithm',
'hash'
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])

commit: 110340c911b84594358a68f7a5a8fb4e9cb16c51
old_file: setup.py
new_file: setup.py
old_contents:
#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
new_contents:
#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose", "paste"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
subject: Add paste requirement for testing (for web.py)
message: Add paste requirement for testing (for web.py)
lang: Python
license: bsd-2-clause
repos: casebeer/webpy_helpers
prompt:
#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
Add paste requirement for testing (for web.py)
response:
#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose", "paste"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
prompt_tagged:
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
<commit_msg>Add paste requirement for testing (for web.py)<commit_after>
response_tagged:
#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose", "paste"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
text:
#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
Add paste requirement for testing (for web.py)#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose", "paste"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
text_tagged:
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)
<commit_msg>Add paste requirement for testing (for web.py)<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
required_modules = [
"simplejson",
"web.py",
]
readme = """
Web.py Helpers
"""
setup(
name="webpy_helpers",
version="0.0.2",
description="",
author="Christopher H. Casebeer",
author_email="",
url="https://github.com/casebeer/webpy_helpers",
py_modules=["webpy_helpers"],
install_requires=required_modules,
tests_require=["nose", "paste"],
test_suite="nose.collector",
long_description=readme,
classifiers=[
"License :: OSI Approved :: BSD License",
"Intended Audience :: Developers",
]
)

commit: f47e7790c0b61f6191615a4e4a341bc0a172b388
old_file: setup.py
new_file: setup.py
old_contents:
# -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=[
'futures>=2.1.3',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
new_contents:
# -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
install_requires = []
try:
from concurrent import futures
except ImportError:
futures = None
install_requires.append('futures>=2.1.3')
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=install_requires,
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
subject: Remove useless requirement on Python 3.2+
message: Remove useless requirement on Python 3.2+
lang: Python
license: mit
repos: d9pouces/django-pipeline,d9pouces/django-pipeline,d9pouces/django-pipeline
prompt:
# -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=[
'futures>=2.1.3',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
Remove useless requirement on Python 3.2+
response:
# -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
install_requires = []
try:
from concurrent import futures
except ImportError:
futures = None
install_requires.append('futures>=2.1.3')
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=install_requires,
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
prompt_tagged:
<commit_before># -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=[
'futures>=2.1.3',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
<commit_msg>Remove useless requirement on Python 3.2+<commit_after>
response_tagged:
# -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
install_requires = []
try:
from concurrent import futures
except ImportError:
futures = None
install_requires.append('futures>=2.1.3')
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=install_requires,
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
text:
# -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=[
'futures>=2.1.3',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
Remove useless requirement on Python 3.2+# -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
install_requires = []
try:
from concurrent import futures
except ImportError:
futures = None
install_requires.append('futures>=2.1.3')
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=install_requires,
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
text_tagged:
<commit_before># -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=[
'futures>=2.1.3',
],
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)
<commit_msg>Remove useless requirement on Python 3.2+<commit_after># -*- coding: utf-8 -*-
import io
from setuptools import setup, find_packages
install_requires = []
try:
from concurrent import futures
except ImportError:
futures = None
install_requires.append('futures>=2.1.3')
setup(
name='django-pipeline',
version='1.5.2',
description='Pipeline is an asset packaging library for Django.',
long_description=io.open('README.rst', encoding='utf-8').read() + '\n\n' +
io.open('HISTORY.rst', encoding='utf-8').read(),
author='Timothée Peignier',
author_email='timothee.peignier@tryphon.org',
url='https://github.com/cyberdelia/django-pipeline',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.tests']),
zip_safe=False,
install_requires=install_requires,
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
]
)

commit: 1ee442e79df7c7a79076460dea930bbd7d87b00a
old_file: setup.py
new_file: setup.py
old_contents:
from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'}
)
new_contents:
from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'},
requires=['numpy(>=1.6)','scipy(>=0.9)']
)
subject: Add requires for development use, at least
message: Add requires for development use, at least
lang: Python
license: bsd-3-clause
repos: awblocker/quantitation,awblocker/quantitation,awblocker/quantitation
prompt:
from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'}
)
Add requires for development use, at least
response:
from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'},
requires=['numpy(>=1.6)','scipy(>=0.9)']
)
prompt_tagged:
<commit_before>from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'}
)
<commit_msg>Add requires for development use, at least<commit_after>
response_tagged:
from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'},
requires=['numpy(>=1.6)','scipy(>=0.9)']
)
text:
from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'}
)
Add requires for development use, at leastfrom distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'},
requires=['numpy(>=1.6)','scipy(>=0.9)']
)
text_tagged:
<commit_before>from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'}
)
<commit_msg>Add requires for development use, at least<commit_after>from distutils.core import setup
# Keeping all Python code for package in src directory
setup(name='quantitation',
url='http://www.awblocker.com',
version='0.1',
description='Absolute quantitation for LC/MSMS proteomics via MCMC',
author='Alexander W Blocker',
author_email='ablocker@gmail.com',
packages=['quantitation','quantitation.glm'],
package_dir = {'': 'lib'},
requires=['numpy(>=1.6)','scipy(>=0.9)']
)

commit: fbcc9b1332b9977a790b82b8cf29a4c241de9650
old_file: setup.py
new_file: setup.py
old_contents:
from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
py_modules=['astpp'],
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
new_contents:
from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
subject: Remove astpp from installation modules
message: Remove astpp from installation modules
lang: Python
license: bsd-2-clause
repos: Suor/flaws
prompt:
from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
py_modules=['astpp'],
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Remove astpp from installation modules
response:
from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
prompt_tagged:
<commit_before>from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
py_modules=['astpp'],
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Remove astpp from installation modules<commit_after>
response_tagged:
from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
text:
from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
py_modules=['astpp'],
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Remove astpp from installation modulesfrom setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
text_tagged:
<commit_before>from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
py_modules=['astpp'],
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Remove astpp from installation modules<commit_after>from setuptools import setup
setup(
name='flaws',
version='0.0.1',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='Finds flaws in your python code',
long_description=open('README.rst').read(),
url='http://github.com/Suor/flaws',
license='BSD',
packages=['flaws'],
install_requires=[
'funcy>=1.1',
],
entry_points = {
'console_scripts': [
'flaws = flaws:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Environment :: Console',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)

commit: 7f20f67d70ef4351d838621191b3447893b604d3
old_file: simplemooc/courses/decorators.py
new_file: simplemooc/courses/decorators.py
old_contents:
from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from .models import Course, Enrollment
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_object_or_404(Course, slug=slug)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
course=course, user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
new_contents:
from django.shortcuts import redirect
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from .models import Course, CourseTRB, Enrollment
from .utils import get_course_by_instance
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_course_by_instance(slug, Course, CourseTRB)
content_type = ContentType.objects.get_for_model(course)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
content_type=content_type,
object_id=course.id,
user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
subject: Update decorator to work with content_type relation
message: Update decorator to work with content_type relation
lang: Python
license: mit
repos: mazulo/simplemooc,mazulo/simplemooc
prompt:
from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from .models import Course, Enrollment
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_object_or_404(Course, slug=slug)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
course=course, user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
Update decorator to work with content_type relation
response:
from django.shortcuts import redirect
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from .models import Course, CourseTRB, Enrollment
from .utils import get_course_by_instance
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_course_by_instance(slug, Course, CourseTRB)
content_type = ContentType.objects.get_for_model(course)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
content_type=content_type,
object_id=course.id,
user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
prompt_tagged:
<commit_before>from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from .models import Course, Enrollment
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_object_or_404(Course, slug=slug)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
course=course, user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
<commit_msg>Update decorator to work with content_type relation<commit_after>
response_tagged:
from django.shortcuts import redirect
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from .models import Course, CourseTRB, Enrollment
from .utils import get_course_by_instance
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_course_by_instance(slug, Course, CourseTRB)
content_type = ContentType.objects.get_for_model(course)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
content_type=content_type,
object_id=course.id,
user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
text:
from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from .models import Course, Enrollment
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_object_or_404(Course, slug=slug)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
course=course, user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
Update decorator to work with content_type relationfrom django.shortcuts import redirect
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from .models import Course, CourseTRB, Enrollment
from .utils import get_course_by_instance
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_course_by_instance(slug, Course, CourseTRB)
content_type = ContentType.objects.get_for_model(course)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
content_type=content_type,
object_id=course.id,
user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
|
<commit_before>from django.shortcuts import redirect, get_object_or_404
from django.contrib import messages
from .models import Course, Enrollment
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_object_or_404(Course, slug=slug)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
course=course, user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
<commit_msg>Update decorator to work with content_type relation<commit_after>from django.shortcuts import redirect
from django.contrib import messages
from django.contrib.contenttypes.models import ContentType
from .models import Course, CourseTRB, Enrollment
from .utils import get_course_by_instance
def enrollment_required(view_func):
def _wrapper(request, *args, **kwargs):
slug = kwargs['slug']
course = get_course_by_instance(slug, Course, CourseTRB)
content_type = ContentType.objects.get_for_model(course)
has_permission = request.user.is_staff
if not has_permission:
try:
enrollment = Enrollment.objects.get(
content_type=content_type,
object_id=course.id,
user=request.user
)
except Enrollment.DoesNotExist:
message = 'Desculpe, mas você não tem permissão de acesso'
else:
if enrollment.is_approved():
has_permission = True
else:
message = 'Sua inscrição do curso ainda está pendente'
if not has_permission:
messages.error(request, message)
return redirect('accounts:dashboard')
request.course = course
return view_func(request, *args, **kwargs)
return _wrapper
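For illustration only, and not part of the recorded commit: a minimal sketch of applying the updated decorator in a view. It assumes the decorator lives in the app's decorators module and that the URL pattern passes the slug keyword argument; the view, template and module names are invented.
# courses/views.py (hypothetical)
from django.shortcuts import render
from .decorators import enrollment_required

@enrollment_required
def course_detail(request, slug):
    # The decorator has already resolved Course/CourseTRB from the slug,
    # checked the generic Enrollment (content_type + object_id) and attached
    # the result as request.course before this body runs.
    return render(request, 'courses/detail.html', {'course': request.course})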
|
7c4b19fee9a50804921fc1084655d05ea3b7e89b
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
download_url='http://github.com/jezdez/django-dbtemplates/zipball/0.5.4',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
Remove download URL since Github doesn't get his act together. Damnit
|
Remove download URL since Github doesn't get his act together. Damnit
committer: Jannis Leidel <jannis@leidel.info>
--HG--
extra : convert_revision : 410200249f2c4981c9e0e8e5cf9334b0e17ec3d4
|
Python
|
bsd-3-clause
|
amitu/django-robots,amitu/django-robots,jscott1971/django-robots,jazzband/django-robots,freakboy3742/django-robots,philippeowagner/django-robots,freakboy3742/django-robots,pbs/django-robots,pbs/django-robots,jscott1971/django-robots,pbs/django-robots,jezdez/django-robots,philippeowagner/django-robots,jezdez/django-robots,jazzband/django-robots,gbezyuk/django-robots,gbezyuk/django-robots
|
from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
download_url='http://github.com/jezdez/django-dbtemplates/zipball/0.5.4',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
Remove download URL since Github doesn't get his act together. Damnit
committer: Jannis Leidel <jannis@leidel.info>
--HG--
extra : convert_revision : 410200249f2c4981c9e0e8e5cf9334b0e17ec3d4
|
from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
<commit_before>from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
download_url='http://github.com/jezdez/django-dbtemplates/zipball/0.5.4',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
<commit_msg>Remove download URL since Github doesn't get his act together. Damnit
committer: Jannis Leidel <jannis@leidel.info>
--HG--
extra : convert_revision : 410200249f2c4981c9e0e8e5cf9334b0e17ec3d4<commit_after>
|
from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
download_url='http://github.com/jezdez/django-dbtemplates/zipball/0.5.4',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
Remove download URL since Github doesn't get his act together. Damnit
committer: Jannis Leidel <jannis@leidel.info>
--HG--
extra : convert_revision : 410200249f2c4981c9e0e8e5cf9334b0e17ec3d4from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
<commit_before>from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
download_url='http://github.com/jezdez/django-dbtemplates/zipball/0.5.4',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
<commit_msg>Remove download URL since Github doesn't get his act together. Damnit
committer: Jannis Leidel <jannis@leidel.info>
--HG--
extra : convert_revision : 410200249f2c4981c9e0e8e5cf9334b0e17ec3d4<commit_after>from distutils.core import setup
setup(
name='django-robots',
version=__import__('robots').__version__,
description='Robots exclusion application for Django, complementing Sitemaps.',
long_description=open('docs/overview.txt').read(),
author='Jannis Leidel',
author_email='jannis@leidel.info',
url='http://code.google.com/p/django-robots/',
packages=['robots'],
package_dir={'dbtemplates': 'dbtemplates'},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
|
c22c6c3a0927f224cb9a396173292ec2a332a74e
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
]
}
)
|
from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
'isort',
]
}
)
|
Add isort as a development requirement
|
Add isort as a development requirement
|
Python
|
mit
|
polygraph-python/polygraph
|
from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
]
}
)
Add isort as a development requirement
|
from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
'isort',
]
}
)
|
<commit_before>from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
]
}
)
<commit_msg>Add isort as a development requirement<commit_after>
|
from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
'isort',
]
}
)
|
from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
]
}
)
Add isort as a development requirementfrom setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
'isort',
]
}
)
|
<commit_before>from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
]
}
)
<commit_msg>Add isort as a development requirement<commit_after>from setuptools import setup
setup(
name='polygraph',
version='0.1.0',
description='Python library for defining GraphQL schemas',
url='https://github.com/yen223/polygraph/',
author='Wei Yen, Lee',
author_email='hello@weiyen.net',
license='MIT',
install_requires=[
'marshmallow>=3.0.0b2',
'graphql-core>=1.0.1',
],
extras_require={
'dev': [
'flake8',
'ipython',
'autopep8',
'isort',
]
}
)
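A hedged sketch of what the new dev extra gives contributors; the commands are illustrative rather than taken from the project's documentation, and the exact isort invocation depends on the isort release that gets installed.
$ pip install -e ".[dev]"       # flake8, ipython, autopep8 and now isort in one step
$ isort --recursive polygraph   # keep imports ordered across the package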
|
72d9e19dfc4d0ad7ec2074f45ec16b54b3c8379a
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
],
packages=[
'ppp_cas',
],
)
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
'ply',
],
packages=[
'ppp_cas',
],
)
|
Add PLY as a dependency.
|
Add PLY as a dependency.
|
Python
|
mit
|
ProjetPP/PPP-CAS,iScienceLuvr/PPP-CAS,ProjetPP/PPP-CAS,iScienceLuvr/PPP-CAS
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
],
packages=[
'ppp_cas',
],
)
Add PLY as a dependency.
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
'ply',
],
packages=[
'ppp_cas',
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
],
packages=[
'ppp_cas',
],
)
<commit_msg>Add PLY as a dependency.<commit_after>
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
'ply',
],
packages=[
'ppp_cas',
],
)
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
],
packages=[
'ppp_cas',
],
)
Add PLY as a dependency.#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
'ply',
],
packages=[
'ppp_cas',
],
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
],
packages=[
'ppp_cas',
],
)
<commit_msg>Add PLY as a dependency.<commit_after>#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_cas',
version='0.3.2',
description='CAS plugin for PPP',
url='https://github.com/ProjetPP',
author='Projet Pensées Profondes',
author_email='marc.chevalier@ens-lyon.org',
license='MIT',
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Development Status :: 1 - Planning',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Software Development :: Libraries',
],
install_requires=[
'ppp_datamodel>=0.5.10',
'ppp_libmodule>=0.7,<0.8',
'ply',
],
packages=[
'ppp_cas',
],
)
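ply is a pure-Python lex/yacc implementation, the kind of dependency a CAS front end needs to tokenize mathematical input. The following is a self-contained, generic ply lexer sketch showing what the new requirement provides; it is not ppp_cas's actual grammar, and the token set is invented.
import ply.lex as lex

tokens = ('NUMBER', 'PLUS')   # ply discovers these module-level declarations
t_PLUS = r'\+'
t_ignore = ' '                # characters to skip silently

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)    # attach a Python int to the token
    return t

def t_error(t):
    t.lexer.skip(1)           # drop characters the lexer does not recognise

lexer = lex.lex()
lexer.input('1 + 2')
print([tok.value for tok in lexer])   # [1, '+', 2]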
|
e6e819b6c08751ae84921687e3e93a4888fb2d5e
|
setup.py
|
setup.py
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==0.60',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
Upgrade dependency prompt-toolkit to ==1.0
|
Upgrade dependency prompt-toolkit to ==1.0
|
Python
|
mit
|
renanivo/with
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==0.60',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
Upgrade dependency prompt-toolkit to ==1.0
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
<commit_before>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==0.60',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
<commit_msg>Upgrade dependency prompt-toolkit to ==1.0<commit_after>
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==0.60',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
Upgrade dependency prompt-toolkit to ==1.0import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
|
<commit_before>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==0.60',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
<commit_msg>Upgrade dependency prompt-toolkit to ==1.0<commit_after>import os
from setuptools import setup
from withtool import __version__
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
with open(path, encoding='utf-8') as f:
return f.read()
setup(
name='with',
version=__version__,
description='A shell context manager',
long_description=read('README.rst'),
author='Renan Ivo',
author_email='renanivom@gmail.com',
url='https://github.com/renanivo/with',
keywords='context manager shell command line repl',
scripts=['bin/with'],
install_requires=[
'appdirs==1.4.0',
'docopt==0.6.2',
'prompt-toolkit==1.0',
'python-slugify==1.2.0',
],
packages=['withtool'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
]
)
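The pin moves rather than widens because prompt-toolkit reworked its public API between the 0.x series and 1.0, so the two lines are not interchangeable. A minimal sketch of the 1.0-style entry point follows; the prompt string is invented and this is not code from the with tool itself.
from prompt_toolkit import prompt   # documented top-level helper in the 1.0 line

command = prompt('with> ')          # read one line with rich line editing
print('you typed:', command)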
|
ab54955caef852c2a73789b09c8f37e81591e98f
|
setup.py
|
setup.py
|
import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'mimic',
'nose',
]
)
|
import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'gmcquillan-mimic',
'nose',
]
)
|
Use the redistribution of (now discontinued) mimic as gmcquillan-mimic.
|
Use the redistribution of (now discontinued) mimic as gmcquillan-mimic.
|
Python
|
mit
|
tomn46037/python-myfitnesspal,rbelzile/python-myfitnesspal,coddingtonbear/python-myfitnesspal,tomn46037/python-myfitnesspal,rbelzile/python-myfitnesspal
|
import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'mimic',
'nose',
]
)
Use the redistribution of (now discontinued) mimic as gmcquillan-mimic.
|
import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'gmcquillan-mimic',
'nose',
]
)
|
<commit_before>import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'mimic',
'nose',
]
)
<commit_msg>Use the redistribution of (now discontinued) mimic as gmcquillan-mimic.<commit_after>
|
import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'gmcquillan-mimic',
'nose',
]
)
|
import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'mimic',
'nose',
]
)
Use the redistribution of (now discontinued) mimic as gmcquillan-mimic.import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'gmcquillan-mimic',
'nose',
]
)
|
<commit_before>import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'mimic',
'nose',
]
)
<commit_msg>Use the redistribution of (now discontinued) mimic as gmcquillan-mimic.<commit_after>import multiprocessing
from setuptools import setup, find_packages
requirements = []
with open('requirements.txt', 'r') as in_:
requirements = in_.readlines()
setup(
name='myfitnesspal',
version='1.2.2',
url='http://github.com/coddingtonbear/python-myfitnesspal/',
description='Access health and fitness data stored in Myfitnesspal',
author='Adam Coddington',
author_email='me@adamcoddington.net',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities',
],
packages=find_packages(),
install_requires=requirements,
test_suite='nose.collector',
tests_require=[
'gmcquillan-mimic',
'nose',
]
)
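A note on scope, as an illustration only: tests_require is resolved solely by the setuptools test command, so swapping in gmcquillan-mimic never affects end users of the library.
$ python setup.py test        # fetches gmcquillan-mimic and nose on demand for this run
$ pip install myfitnesspal    # end users never download the test-only packages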
|
15b2054454ff41d743c06e231f64585ea7219d92
|
setup.py
|
setup.py
|
'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.2',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
|
'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.3-dev',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
|
Prepare for development of next release
|
Prepare for development of next release
|
Python
|
bsd-3-clause
|
jbeluch/xam
|
'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.2',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
Prepare for development of next release
|
'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.3-dev',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
|
<commit_before>'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.2',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
<commit_msg>Prepare for development of next release<commit_after>
|
'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.3-dev',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
|
'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.2',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
Prepare for development of next release'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.3-dev',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
|
<commit_before>'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.2',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
<commit_msg>Prepare for development of next release<commit_after>'''
XBMC Addon Manager
------------------
A CLI utility for searching and listing XBMC Addon Repositories.
Setup
`````
::
$ pip install xam
$ xam --help
Links
`````
* `website <http://github.com/jbeluch/xam/>`_
'''
from setuptools import setup
def get_requires():
    '''If python > 2.7, argparse and OrderedDict will be included. Otherwise
we need external packages.
'''
requires = ['requests']
try:
import argparse
except ImportError:
requires.append('argparse')
try:
from collections import OrderedDict
except ImportError:
requires.append('collective.ordereddict')
return requires
setup(
name='xam',
version='0.3-dev',
url='http://github.com/jbeluch/xam/',
license='BSD',
author='Jonathan Beluch',
author_email='web@jonathanbeluch.com',
description='A utility for listing, searching and viewing source code for '
'XBMC addons.',
long_description=__doc__,
packages=['xam'],
platforms='any',
install_requires=get_requires(),
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Topic :: Utilities',
],
entry_points={
'console_scripts': [
'xam = xam.xam:main'
]
}
)
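The entry_points block, untouched by this version bump, is what turns the package into a shell command: console_scripts tells setuptools to generate an `xam` launcher at install time. As a rough equivalence for illustration, running the installed `xam` command (with its arguments passed through) behaves like:
$ python -c "import sys; from xam.xam import main; sys.exit(main())"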
|
50b2343f8a8c60b647fe254f79a05a5599d97862
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
test_requirements = ['hypothesis>=3.6.0']
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
tests_require=test_requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
|
from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
|
Revert "Add hypothesis as test requirement."
|
Revert "Add hypothesis as test requirement."
This reverts commit 7e340017f4bb0a8a99219f3896071ab07a017f4f.
|
Python
|
mit
|
drvinceknight/Nashpy
|
from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
test_requirements = ['hypothesis>=3.6.0']
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
tests_require=test_requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
Revert "Add hypothesis as test requirement."
This reverts commit 7e340017f4bb0a8a99219f3896071ab07a017f4f.
|
from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
|
<commit_before>from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
test_requirements = ['hypothesis>=3.6.0']
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
tests_require=test_requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
<commit_msg>Revert "Add hypothesis as test requirement."
This reverts commit 7e340017f4bb0a8a99219f3896071ab07a017f4f.<commit_after>
|
from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
|
from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
test_requirements = ['hypothesis>=3.6.0']
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
tests_require=test_requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
Revert "Add hypothesis as test requirement."
This reverts commit 7e340017f4bb0a8a99219f3896071ab07a017f4f.from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
|
<commit_before>from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
test_requirements = ['hypothesis>=3.6.0']
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
tests_require=test_requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
<commit_msg>Revert "Add hypothesis as test requirement."
This reverts commit 7e340017f4bb0a8a99219f3896071ab07a017f4f.<commit_after>from setuptools import setup, find_packages
import unittest
import doctest
# Read in the version number
exec(open('src/nash/version.py', 'r').read())
requirements = ["numpy==1.11.2"]
def test_suite():
"""Discover all tests in the tests dir"""
test_loader = unittest.TestLoader()
# Read in unit tests
test_suite = test_loader.discover('tests')
# Read in doctests from README
test_suite.addTests(doctest.DocFileSuite('README.md',
optionflags=doctest.ELLIPSIS))
return test_suite
setup(
name='nashpy',
version=__version__,
install_requires=requirements,
author='Vince Knight, James Campbell',
author_email=('knightva@cardiff.ac.uk'),
packages=find_packages('src'),
package_dir={"": "src"},
test_suite='setup.test_suite',
url='',
license='The MIT License (MIT)',
description='A library to compute equilibria of 2 player normal form games',
)
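The revert above simply drops tests_require. For test-only dependencies a common alternative, shown here only as an illustrative sketch and not as anything nashpy does, is an extras_require group installed with pip install package[test]:
from setuptools import setup, find_packages

setup(
    name='example-package',              # illustrative name, not nashpy
    version='0.0.1',
    packages=find_packages('src'),
    package_dir={'': 'src'},
    install_requires=['numpy'],
    extras_require={
        # pulled in only by: pip install example-package[test]
        'test': ['hypothesis>=3.6.0'],
    },
)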
|
9a5c17781178e8c97a4749e49374c3b4449c7387
|
tests/test_models.py
|
tests/test_models.py
|
from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
|
from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
def test_must_use_method_to_add_small_molecule(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.small_molecules().add(self.small_molecule1)
self.assertEqual(model.small_molecules(), set())
|
Make small molecules read only
|
Make small molecules read only
|
Python
|
mit
|
samirelanduk/atomium,samirelanduk/atomium,samirelanduk/molecupy
|
from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
Make small molecules read only
|
from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
def test_must_use_method_to_add_small_molecule(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.small_molecules().add(self.small_molecule1)
self.assertEqual(model.small_molecules(), set())
|
<commit_before>from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
<commit_msg>Make small molecules read only<commit_after>
|
from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
def test_must_use_method_to_add_small_molecule(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.small_molecules().add(self.small_molecule1)
self.assertEqual(model.small_molecules(), set())
|
from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
Make small molecules read onlyfrom unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
def test_must_use_method_to_add_small_molecule(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.small_molecules().add(self.small_molecule1)
self.assertEqual(model.small_molecules(), set())
|
<commit_before>from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
<commit_msg>Make small molecules read only<commit_after>from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
def test_must_use_method_to_add_small_molecule(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.small_molecules().add(self.small_molecule1)
self.assertEqual(model.small_molecules(), set())
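The added test implies that small_molecules() now hands back a copy of the internal set rather than the set itself. A minimal sketch of a class that would satisfy both tests, assuming the simplest possible implementation (the real molecupy Model carries much more behaviour):
class Model(object):
    """Sketch only: expose copies so callers cannot mutate internal state."""
    def __init__(self):
        self._small_molecules = set()

    def add_small_molecule(self, molecule):
        self._small_molecules.add(molecule)

    def small_molecules(self):
        # Returning a copy makes model.small_molecules().add(x) a no-op for
        # the model itself, which is exactly what the new test asserts.
        return set(self._small_molecules)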
|
2eca3cd6e0065a65ed65b3ce13fc7f7d9caf1717
|
AAA.py
|
AAA.py
|
import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name, get_package_path
PLUGIN_NAME = get_package_name()
libpath = os.path.join(get_package_path(), "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
|
import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name
PLUGIN_NAME = get_package_name()
path = os.path.dirname(__file__)
libpath = os.path.join(path, "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
|
Make imports work when in .sublime-package
|
Make imports work when in .sublime-package
|
Python
|
mit
|
SublimeText/AAAPackageDev,SublimeText/AAAPackageDev,SublimeText/PackageDev
|
import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name, get_package_path
PLUGIN_NAME = get_package_name()
libpath = os.path.join(get_package_path(), "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
Make imports work when in .sublime-package
|
import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name
PLUGIN_NAME = get_package_name()
path = os.path.dirname(__file__)
libpath = os.path.join(path, "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
|
<commit_before>import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name, get_package_path
PLUGIN_NAME = get_package_name()
libpath = os.path.join(get_package_path(), "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
<commit_msg>Make imports work when in .sublime-package<commit_after>
|
import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name
PLUGIN_NAME = get_package_name()
path = os.path.dirname(__file__)
libpath = os.path.join(path, "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
|
import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name, get_package_path
PLUGIN_NAME = get_package_name()
libpath = os.path.join(get_package_path(), "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
Make imports work when in .sublime-packageimport os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name
PLUGIN_NAME = get_package_name()
path = os.path.dirname(__file__)
libpath = os.path.join(path, "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
|
<commit_before>import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name, get_package_path
PLUGIN_NAME = get_package_name()
libpath = os.path.join(get_package_path(), "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
<commit_msg>Make imports work when in .sublime-package<commit_after>import os
import sys
try: # ST3
from .Lib.sublime_lib.path import get_package_name
PLUGIN_NAME = get_package_name()
path = os.path.dirname(__file__)
libpath = os.path.join(path, "Lib")
except ValueError: # ST2
# For some reason the import does only work when RELOADING the plugin, not
# when ST is loading it initially.
# from lib.sublime_lib.path import get_package_name, get_package_path
path = os.path.normpath(os.getcwdu())
PLUGIN_NAME = os.path.basename(path)
libpath = os.path.join(path, "Lib")
def add(path):
if not path in sys.path:
sys.path.append(path)
print("[%s] Added %s to sys.path." % (PLUGIN_NAME, path))
# Make sublime_lib (and more) available for all packages.
add(libpath)
# Differentiate between Python 2 and Python 3 packages (split by folder)
add(os.path.join(libpath, "_py%d" % sys.version_info[0]))
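The key change is deriving the Lib path from __file__ instead of get_package_path(), so the lookup still resolves when the plugin ships as a zipped .sublime-package. The same pattern reduced to a generic sketch (directory names are illustrative):
import os
import sys

# Resolve the bundled "Lib" directory relative to this module rather than the
# current working directory, so the path stays valid however the host starts.
_libpath = os.path.join(os.path.dirname(os.path.abspath(__file__)), "Lib")
if _libpath not in sys.path:
    sys.path.append(_libpath)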
|
ed201c4cb78dd9fe1bbc9563f8219f9127cbfe1e
|
app.py
|
app.py
|
# !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_cache import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
|
# !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_caching import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
|
Change Cache Provider Since Flask upgraded
|
:alien: Change Cache Provider Since Flask upgraded
See: https://github.com/thadeusb/flask-cache/issues/188
|
Python
|
mit
|
Rhilip/PT-help-server
|
# !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_cache import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
:alien: Change Cache Provider Since Flask upgraded
See: https://github.com/thadeusb/flask-cache/issues/188
|
# !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_caching import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
|
<commit_before># !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_cache import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
<commit_msg>:alien: Change Cache Provider Since Flask upgraded
See: https://github.com/thadeusb/flask-cache/issues/188<commit_after>
|
# !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_caching import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
|
# !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_cache import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
:alien: Change Cache Provider Since Flask upgraded
See: https://github.com/thadeusb/flask-cache/issues/188# !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_caching import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
|
<commit_before># !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_cache import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
<commit_msg>:alien: Change Cache Provider Since Flask upgraded
See: https://github.com/thadeusb/flask-cache/issues/188<commit_after># !/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2020 Rhilip <rhilipruan@gmail.com>
import pymysql
from flask import Flask
from flaskext.mysql import MySQL
from flask_cors import CORS
from flask_caching import Cache
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
class Database(MySQL):
def exec(self, sql: str, args=None, r_dict: bool = False, fetch_all: bool = False, ret_row: bool = False):
db = self.get_db()
cursor = db.cursor(pymysql.cursors.DictCursor) if r_dict else db.cursor() # Cursor type
row = cursor.execute(sql, args)
data = cursor.fetchall() if fetch_all else cursor.fetchone() # The lines of return info (one or all)
return (row, data) if ret_row else data
mysql = Database(app=app, autocommit=True)
cache = Cache(app)
CORS(app)
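Flask-Cache is unmaintained and breaks against newer Flask releases, which is why the import moves to the maintained fork Flask-Caching; the Cache(app) call itself is unchanged. A minimal usage sketch of Flask-Caching with illustrative configuration values, not this project's config:
from flask import Flask
from flask_caching import Cache

app = Flask(__name__)
app.config["CACHE_TYPE"] = "simple"   # in-process cache; deployments often use redis/memcached
cache = Cache(app)

@app.route("/expensive")
@cache.cached(timeout=60)             # the rendered result is reused for 60 seconds
def expensive_view():
    return "computed at most once per minute"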
|
4f9bb7a81f52b5ee46be338e5c699411286f1401
|
tasks.py
|
tasks.py
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration_tests(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration_tests, release, docs)
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
|
Rename integration tests task for consistency w/ other projs
|
Rename integration tests task for consistency w/ other projs
|
Python
|
bsd-2-clause
|
bitprophet/releases
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration_tests(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration_tests, release, docs)
Rename integration tests task for consistency w/ other projs
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
|
<commit_before>from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration_tests(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration_tests, release, docs)
<commit_msg>Rename integration tests task for consistency w/ other projs<commit_after>
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
|
from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration_tests(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration_tests, release, docs)
Rename integration tests task for consistency w/ other projsfrom invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
|
<commit_before>from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration_tests(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration_tests, release, docs)
<commit_msg>Rename integration tests task for consistency w/ other projs<commit_after>from invocations import docs
from invocations.testing import test
from invocations.packaging import release
from invoke import Collection
from invoke import run
from invoke import task
@task(help={
'pty': "Whether to run tests under a pseudo-tty",
})
def integration(pty=True):
"""Runs integration tests."""
cmd = 'inv test -o --tests=integration'
run(cmd + ('' if pty else ' --no-pty'), pty=pty)
ns = Collection(test, integration, release, docs)
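invoke derives the command-line task name from the function name, so this rename is what shortens the call to inv integration. Under invoke 1.0 and later the same task would also receive a Context argument; a sketch of that spelling, offered only as an illustration:
from invoke import task

@task(help={'pty': "Whether to run tests under a pseudo-tty"})
def integration(c, pty=True):
    """Runs integration tests."""
    cmd = 'inv test -o --tests=integration'
    c.run(cmd + ('' if pty else ' --no-pty'), pty=pty)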
|
3ca0007970056e665b28c62d39ed6073309a97cd
|
kovfig.py
|
kovfig.py
|
#! /usr/bin/env python
# coding:utf-8
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = "./phrase.model"
bigram_model_file = "./bigram.model"
if __name__ == '__main__':
pass
|
#! /usr/bin/env python
# coding:utf-8
from os import path
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = path.join(
path.abspath(path.dirname(__file__)),
"phrase.model"
)
bigram_model_file = path.join(
path.abspath(path.dirname(__file__)),
"bigram.model"
)
if __name__ == '__main__':
print("{} = {}".format(
"loop_count",
loop_count))
print("{} = {}".format(
"phrase_model_file",
phrase_model_file))
print("{} = {}".format(
"bigram_model_file",
bigram_model_file))
|
Modify path to use __file__
|
Modify path to use __file__
|
Python
|
mit
|
kenkov/kovlive
|
#! /usr/bin/env python
# coding:utf-8
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = "./phrase.model"
bigram_model_file = "./bigram.model"
if __name__ == '__main__':
pass
Modify path to use __file__
|
#! /usr/bin/env python
# coding:utf-8
from os import path
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = path.join(
path.abspath(path.dirname(__file__)),
"phrase.model"
)
bigram_model_file = path.join(
path.abspath(path.dirname(__file__)),
"bigram.model"
)
if __name__ == '__main__':
print("{} = {}".format(
"loop_count",
loop_count))
print("{} = {}".format(
"phrase_model_file",
phrase_model_file))
print("{} = {}".format(
"bigram_model_file",
bigram_model_file))
|
<commit_before>#! /usr/bin/env python
# coding:utf-8
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = "./phrase.model"
bigram_model_file = "./bigram.model"
if __name__ == '__main__':
pass
<commit_msg>Modify path to use __file__<commit_after>
|
#! /usr/bin/env python
# coding:utf-8
from os import path
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = path.join(
path.abspath(path.dirname(__file__)),
"phrase.model"
)
bigram_model_file = path.join(
path.abspath(path.dirname(__file__)),
"bigram.model"
)
if __name__ == '__main__':
print("{} = {}".format(
"loop_count",
loop_count))
print("{} = {}".format(
"phrase_model_file",
phrase_model_file))
print("{} = {}".format(
"bigram_model_file",
bigram_model_file))
|
#! /usr/bin/env python
# coding:utf-8
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = "./phrase.model"
bigram_model_file = "./bigram.model"
if __name__ == '__main__':
pass
Modify path to use __file__#! /usr/bin/env python
# coding:utf-8
from os import path
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = path.join(
path.abspath(path.dirname(__file__)),
"phrase.model"
)
bigram_model_file = path.join(
path.abspath(path.dirname(__file__)),
"bigram.model"
)
if __name__ == '__main__':
print("{} = {}".format(
"loop_count",
loop_count))
print("{} = {}".format(
"phrase_model_file",
phrase_model_file))
print("{} = {}".format(
"bigram_model_file",
bigram_model_file))
|
<commit_before>#! /usr/bin/env python
# coding:utf-8
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = "./phrase.model"
bigram_model_file = "./bigram.model"
if __name__ == '__main__':
pass
<commit_msg>Modify path to use __file__<commit_after>#! /usr/bin/env python
# coding:utf-8
from os import path
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = path.join(
path.abspath(path.dirname(__file__)),
"phrase.model"
)
bigram_model_file = path.join(
path.abspath(path.dirname(__file__)),
"bigram.model"
)
if __name__ == '__main__':
print("{} = {}".format(
"loop_count",
loop_count))
print("{} = {}".format(
"phrase_model_file",
phrase_model_file))
print("{} = {}".format(
"bigram_model_file",
bigram_model_file))
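Anchoring the model paths to the module's own directory means they resolve correctly no matter which working directory the interpreter was started from. An equivalent sketch using pathlib, shown only as an alternative spelling of the same idea:
from pathlib import Path

_here = Path(__file__).resolve().parent
phrase_model_file = str(_here / "phrase.model")
bigram_model_file = str(_here / "bigram.model")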
|
01cea97dad211746d2d5ba4ae5c03aa06121a544
|
tests/test_geocode.py
|
tests/test_geocode.py
|
import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assertIn('address', df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
|
import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assert_('address' in df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
|
Change assertIn to assert_ for python 2.6 support
|
Change assertIn to assert_ for python 2.6 support
|
Python
|
bsd-3-clause
|
koldunovn/geopandas,geopandas/geopandas,urschrei/geopandas,geopandas/geopandas,maxalbert/geopandas,jorisvandenbossche/geopandas,ozak/geopandas,IamJeffG/geopandas,micahcochran/geopandas,fonnesbeck/geopandas,scw/geopandas,jorisvandenbossche/geopandas,micahcochran/geopandas,geopandas/geopandas,kwinkunks/geopandas,jdmcbr/geopandas,jdmcbr/geopandas,perrygeo/geopandas,snario/geopandas,jorisvandenbossche/geopandas,ozak/geopandas
|
import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assertIn('address', df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
Change assertIn to assert_ for python 2.6 support
|
import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assert_('address' in df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
|
<commit_before>import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assertIn('address', df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
<commit_msg>Change assertIn to assert_ for python 2.6 support<commit_after>
|
import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assert_('address' in df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
|
import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assertIn('address', df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
Change assertIn to assert_ for python 2.6 supportimport unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assert_('address' in df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
|
<commit_before>import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assertIn('address', df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
<commit_msg>Change assertIn to assert_ for python 2.6 support<commit_after>import unittest
import fiona
from shapely.geometry import Point
import geopandas as gpd
from geopandas.geocode import geocode, _prepare_geocode_result
class TestGeocode(unittest.TestCase):
def test_prepare_result(self):
# Calls _prepare_result with sample results from the geocoder call
# loop
p0 = Point(12.3, -45.6) # Treat these as lat/lon
p1 = Point(-23.4, 56.7)
d = {'a': ('address0', p0.coords[0]),
'b': ('address1', p1.coords[0])}
df = _prepare_geocode_result(d)
assert type(df) is gpd.GeoDataFrame
self.assertEqual(fiona.crs.from_epsg(4326), df.crs)
self.assertEqual(len(df), 2)
self.assert_('address' in df)
coords = df.loc['a']['geometry'].coords[0]
test = p0.coords[0]
# Output from the df should be lon/lat
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
coords = df.loc['b']['geometry'].coords[0]
test = p1.coords[0]
self.assertAlmostEqual(coords[0], test[1])
self.assertAlmostEqual(coords[1], test[0])
def test_bad_provider(self):
self.assertRaises(ValueError, geocode, ['cambridge, ma'], 'badprovider')
|
e9eb29d300d4072a32d824d4f588ff76a905bb89
|
gunicorn_settings.py
|
gunicorn_settings.py
|
bind = '127.0.0.1:8001'
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
|
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
|
Use IP and PORT environment variables if set
|
Use IP and PORT environment variables if set
|
Python
|
apache-2.0
|
notapresent/rbm2m,notapresent/rbm2m
|
bind = '127.0.0.1:8001'
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
Use IP and PORT environment variables if set
|
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
|
<commit_before>bind = '127.0.0.1:8001'
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
<commit_msg>Use IP and PORT environment variables if set<commit_after>
|
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
|
bind = '127.0.0.1:8001'
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
Use IP and PORT environment variables if set
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
|
<commit_before>bind = '127.0.0.1:8001'
workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
<commit_msg>Use IP and PORT environment variables if set<commit_after>workers = 2
worker_class = 'gevent'
timeout = 30
keepalive = 2
errorlog = '-'
|
da9bfba9f8111fa62ff4b0387e3b2faf8f367855
|
perpendicular-least-squares.py
|
perpendicular-least-squares.py
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters():
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type):
if type == "line":
return 0
elif type == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
Create stub functions and skeleton of program
|
Create stub functions and skeleton of program
|
Python
|
mit
|
jacobbieker/GCP-perpendicular-least-squares,jacobbieker/GCP-perpendicular-least-squares,jacobbieker/GCP-perpendicular-least-squares
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
Create stub functions and skeleton of program
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters():
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type):
if type == "line":
return 0
elif type == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
<commit_before>__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
<commit_msg>Create stub functions and skeleton of program<commit_after>
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters():
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type):
if type == "line":
return 0
elif type == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
Create stub functions and skeleton of program
__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters():
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type):
if type == "line":
return 0
elif type == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
<commit_before>__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
<commit_msg>Create stub functions and skeleton of program<commit_after>__author__ = 'Jacob Bieker'
import os, sys
import numpy
from multiprocessing import Pool
def line_solve():
# TODO: Find the Least Squares for a line
return 0
def plane_solve():
# TODO: Find the least Squares for a plane
return 0
def read_clusters():
# TODO: Read in the files containing the cluster points
return 0
def determine_type():
# TODO: Determine the type (i.e. Plane or Line) to solve for
return 0
def initial_guess(largest_cluster, type):
if type == "line":
return 0
elif type == "plane":
return 0
# TODO: Get an inital guess from the perpendicular least squares method from the largest cluster
return
if __name__ == "__main__":
arguments = str(sys.argv)
print(arguments)
|
1aa8344177a6e336075134ea802b14e14b8e2f03
|
utils.py
|
utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pandas import tslib
def fix_render(value):
if type(value) is str:
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
elif type(value) is tslib.Timestamp:
return value.strftime("%Y-%m-%d %H:%M:%S")
return value
def pandas_to_dict(df):
return [{colname: fix_render(row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
Fix date format on fix render
|
Fix date format on fix render
|
Python
|
mit
|
mlgruby/mining,chrisdamba/mining,mining/mining,jgabriellima/mining,AndrzejR/mining,chrisdamba/mining,mlgruby/mining,seagoat/mining,AndrzejR/mining,mining/mining,jgabriellima/mining,seagoat/mining,avelino/mining,avelino/mining,mlgruby/mining
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
Fix date format on fix render
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pandas import tslib
def fix_render(value):
if type(value) is str:
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
elif type(value) is tslib.Timestamp:
return value.strftime("%Y-%m-%d %H:%M:%S")
return value
def pandas_to_dict(df):
return [{colname: fix_render(row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
<commit_msg>Fix date format on fix render<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pandas import tslib
def fix_render(value):
if type(value) is str:
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
elif type(value) is tslib.Timestamp:
return value.strftime("%Y-%m-%d %H:%M:%S")
return value
def pandas_to_dict(df):
return [{colname: fix_render(row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
Fix date format on fix render
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pandas import tslib
def fix_render(value):
if type(value) is str:
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
elif type(value) is tslib.Timestamp:
return value.strftime("%Y-%m-%d %H:%M:%S")
return value
def pandas_to_dict(df):
return [{colname: fix_render(row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
def fix_str(value):
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
def pandas_to_dict(df):
return [{colname: (fix_str(row[i]) if type(row[i]) is str else row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
<commit_msg>Fix date format on fix render<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pandas import tslib
def fix_render(value):
if type(value) is str:
try:
return unicode(value)
except UnicodeDecodeError:
return unicode(value.decode('latin1'))
elif type(value) is tslib.Timestamp:
return value.strftime("%Y-%m-%d %H:%M:%S")
return value
def pandas_to_dict(df):
return [{colname: fix_render(row[i])
for i, colname in enumerate(df.columns)}
for row in df.values]
|
b7f3ee836cb73d274bfd7dc415bb43e2fa743e12
|
httpserverhandler.py
|
httpserverhandler.py
|
#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if True:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
|
#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if send_reply:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
|
Add .ico to the allowed extension list.
|
Add .ico to the allowed extension list.
|
Python
|
apache-2.0
|
gearlles/planb-client,gearlles/planb-client,gearlles/planb-client
|
#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if True:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
Add .ico to the allowed extension list.
|
#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if send_reply:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
|
<commit_before>#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if True:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
<commit_msg>Add .ico to the allowed extension list.<commit_after>
|
#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if send_reply:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
|
#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if True:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
Add .ico to the allowed extension list.
#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if send_reply:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
|
<commit_before>#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if True:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
<commit_msg>Add .ico to the allowed extension list.<commit_after>#!/usr/bin/python
# -*-coding: utf8 -*-
from BaseHTTPServer import BaseHTTPRequestHandler
import mimetypes
from os import curdir, sep
import os
class HttpServerHandler(BaseHTTPRequestHandler):
allowed_extensions = ['.html', '.jpg', '.gif', '.ico', '.js', '.css', '.tff', '.woff']
def has_permission_to_reply(self, file_path):
file_name, file_extension = os.path.splitext(file_path)
send_reply = file_extension in self.allowed_extensions
mimetype = mimetypes.guess_type(file_name + file_extension)
return mimetype, send_reply
def do_GET(self):
file_path = self.path
if file_path == "/":
file_path = "/index.html"
try:
mimetype, send_reply = self.has_permission_to_reply(file_path)
if send_reply:
full_path = curdir + sep + "pages" + sep + file_path
f = open(full_path)
self.send_response(200)
self.send_header('Content-type', mimetype)
self.end_headers()
self.wfile.write(f.read())
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % file_path)
|
f3cb8becb4f243d9f9a955aa3cafe53f3bf1e548
|
examples/rRaman.py
|
examples/rRaman.py
|
# -*- coding: utf-8 -*-
"""
Resonance Raman
==========
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
|
# -*- coding: utf-8 -*-
"""
Resonance Raman
===============
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
|
Fix rST formatting in example
|
Fix rST formatting in example
|
Python
|
mit
|
wright-group/WrightTools,wright-group/WrightTools
|
# -*- coding: utf-8 -*-
"""
Resonance Raman
==========
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
Fix rST formatting in example
|
# -*- coding: utf-8 -*-
"""
Resonance Raman
===============
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
|
<commit_before># -*- coding: utf-8 -*-
"""
Resonance Raman
==========
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
<commit_msg>Fix rST formatting in example<commit_after>
|
# -*- coding: utf-8 -*-
"""
Resonance Raman
===============
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
|
# -*- coding: utf-8 -*-
"""
Resonance Raman
==========
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
Fix rST formatting in example
# -*- coding: utf-8 -*-
"""
Resonance Raman
===============
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
|
<commit_before># -*- coding: utf-8 -*-
"""
Resonance Raman
==========
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
<commit_msg>Fix rST formatting in example<commit_after># -*- coding: utf-8 -*-
"""
Resonance Raman
===============
A Resonance Raman plot.
"""
import WrightTools as wt
from WrightTools import datasets
p = datasets.BrunoldrRaman.LDS821_514nm_80mW
data = wt.data.from_BrunoldrRaman(p)
trash_pixels = 56
data = data.split(0, 843.0)[1]
data.convert('wn', verbose=False)
artist = wt.artists.mpl_1D(data)
d = artist.plot()
|
1a4994e86c01b33878d022574782df88b2f4016a
|
fuzzinator/call/file_reader_decorator.py
|
fuzzinator/call/file_reader_decorator.py
|
# Copyright (c) 2017 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(issue['test'], 'rb') as f:
issue['filename'] = os.path.basename(issue['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
|
# Copyright (c) 2017-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(kwargs['test'], 'rb') as f:
issue['filename'] = os.path.basename(kwargs['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
|
Fix test extraction in FileReaderDecorator.
|
Fix test extraction in FileReaderDecorator.
|
Python
|
bsd-3-clause
|
renatahodovan/fuzzinator,akosthekiss/fuzzinator,renatahodovan/fuzzinator,akosthekiss/fuzzinator,akosthekiss/fuzzinator,renatahodovan/fuzzinator,akosthekiss/fuzzinator,renatahodovan/fuzzinator
|
# Copyright (c) 2017 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(issue['test'], 'rb') as f:
issue['filename'] = os.path.basename(issue['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
Fix test extraction in FileReaderDecorator.
|
# Copyright (c) 2017-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(kwargs['test'], 'rb') as f:
issue['filename'] = os.path.basename(kwargs['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
|
<commit_before># Copyright (c) 2017 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(issue['test'], 'rb') as f:
issue['filename'] = os.path.basename(issue['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
<commit_msg>Fix test extraction in FileReaderDecorator.<commit_after>
|
# Copyright (c) 2017-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(kwargs['test'], 'rb') as f:
issue['filename'] = os.path.basename(kwargs['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
|
# Copyright (c) 2017 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(issue['test'], 'rb') as f:
issue['filename'] = os.path.basename(issue['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
Fix test extraction in FileReaderDecorator.
# Copyright (c) 2017-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(kwargs['test'], 'rb') as f:
issue['filename'] = os.path.basename(kwargs['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
|
<commit_before># Copyright (c) 2017 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(issue['test'], 'rb') as f:
issue['filename'] = os.path.basename(issue['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
<commit_msg>Fix test extraction in FileReaderDecorator.<commit_after># Copyright (c) 2017-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import os
from . import CallableDecorator
class FileReaderDecorator(CallableDecorator):
"""
Decorator for SUTs that take input as a file path: saves the content of
the failing test case.
Moreover, the issue (if any) is also extended with the new ``'filename'``
property containing the name of the test case (as received in the ``test``
argument).
**Example configuration snippet:**
.. code-block:: ini
[sut.foo]
call=fuzzinator.call.SubprocessCall
call.decorate(0)=fuzzionator.call.FileReaderDecorator
[sut.foo.call]
# assuming that foo takes one file as input specified on command line
command=/home/alice/foo/bin/foo {test}
"""
def decorator(self, **kwargs):
def wrapper(fn):
def reader(*args, **kwargs):
issue = fn(*args, **kwargs)
if issue is not None:
with open(kwargs['test'], 'rb') as f:
issue['filename'] = os.path.basename(kwargs['test'])
issue['test'] = f.read()
return issue
return reader
return wrapper
|
1e7349bc3e7282f6103d6d67949da60045d1f06c
|
libs/utils/utils.py
|
libs/utils/utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json():
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
if not os.path.isfile("json/subscribers.json"):
SUBSCRIBERS = []
else:
SUBSCRIBERS = read_json("json/subscribers.json")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json(json_file="json/subscribers.json"):
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
SUBSCRIBERS = read_json(json_file) if os.path.isfile(json_file) else []
|
Enable testing with other subscriber files
|
Enable testing with other subscriber files
`load_subscribers_json("json/test_subscribers.json")` is now possible.
|
Python
|
mit
|
UnivaqTelegramBot/UnivaqInformaticaBot,giacomocerquone/UnivaqBot
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json():
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
if not os.path.isfile("json/subscribers.json"):
SUBSCRIBERS = []
else:
SUBSCRIBERS = read_json("json/subscribers.json")
Enable testing with other subscriber files
`load_subscribers_json("json/test_subscribers.json")` is now possible.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json(json_file="json/subscribers.json"):
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
SUBSCRIBERS = read_json(json_file) if os.path.isfile(json_file) else []
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json():
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
if not os.path.isfile("json/subscribers.json"):
SUBSCRIBERS = []
else:
SUBSCRIBERS = read_json("json/subscribers.json")
<commit_msg>Enable testing with other subscriber files
`load_subscribers_json("json/test_subscribers.json")` is now possible.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json(json_file="json/subscribers.json"):
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
SUBSCRIBERS = read_json(json_file) if os.path.isfile(json_file) else []
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json():
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
if not os.path.isfile("json/subscribers.json"):
SUBSCRIBERS = []
else:
SUBSCRIBERS = read_json("json/subscribers.json")
Enable testing with other subscriber files
`load_subscribers_json("json/test_subscribers.json")` is now possible.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json(json_file="json/subscribers.json"):
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
SUBSCRIBERS = read_json(json_file) if os.path.isfile(json_file) else []
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json():
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
if not os.path.isfile("json/subscribers.json"):
SUBSCRIBERS = []
else:
SUBSCRIBERS = read_json("json/subscribers.json")
<commit_msg>Enable testing with other subscriber files
`load_subscribers_json("json/test_subscribers.json")` is now possible.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The package that contains groups all the functions needed by other scripts."""
import os.path
import sys
sys.path.insert(0, '../')
import logging
import json
import configparser
def get_configuration():
"""Get global configuration from service.cfg"""
config = configparser.ConfigParser()
config.read("service.cfg")
return config
def get_logger(debug):
"""Get logger object"""
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO
)
logger = logging.getLogger(__name__)
if debug is False:
logging.disable(logging.CRITICAL)
return logger
def write_json(data, json_file):
"""General function used everywhere to write data into a json file"""
with open(json_file, "w") as json_file:
json.dump(data, json_file)
def read_json(json_file):
"""General function used everywhere to read a json file"""
with open(json_file, "r") as json_file:
return json.load(json_file)
def load_subscribers_json(json_file="json/subscribers.json"):
"""Defining command to check (and create) the subscribers.json file"""
global SUBSCRIBERS
SUBSCRIBERS = read_json(json_file) if os.path.isfile(json_file) else []
|
2fbd90a9995e8552e818e53d3b213e4cfef470de
|
molly/installer/dbcreate.py
|
molly/installer/dbcreate.py
|
"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
|
"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
import os
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
|
Fix broken setting of postgres password
|
Fix broken setting of postgres password
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
Fix broken setting of postgres password
|
"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
import os
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
|
<commit_before>"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
<commit_msg>Fix broken setting of postgres password<commit_after>
|
"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
import os
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
|
"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
Fix broken setting of postgres password
"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
import os
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
|
<commit_before>"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
<commit_msg>Fix broken setting of postgres password<commit_after>"""
Creates a database for Molly, and appropriate users, once given login
information as super user, or by running as root.
"""
import os
from molly.installer.utils import quiet_exec, CommandFailed
def create(dba_user, dba_pass, username, password, database):
creds = []
if dba_user:
creds += ['-U', dba_user]
if dba_pass:
os.environ['PGPASSWORD'] = dba_pass
try:
quiet_exec(['psql'] + creds + ['-c',"CREATE USER %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
except CommandFailed:
pass
quiet_exec(['psql'] + creds + ['-c',"ALTER ROLE %s WITH PASSWORD '%s';" % (username, password)], 'dbcreate')
try:
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
except CommandFailed:
quiet_exec(['dropdb'] + creds + [database], 'dbcreate')
quiet_exec(['createdb'] + creds + ['-T','template_postgis',database], 'dbcreate')
quiet_exec(['psql'] + creds + ['-c',"GRANT ALL ON DATABASE %s TO %s;" % (database, username)], 'dbcreate')
if dba_pass:
del os.environ['PGPASSWORD']
|
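An illustrative aside on the pattern behind the fix above: psql reads the PGPASSWORD environment variable, so the point of the commit is to export it before the calls and always remove it afterwards, which is why the missing `import os` mattered. The sketch below is a minimal stand-in, assuming plain subprocess in place of Molly's quiet_exec helper and a psql client on PATH; the function name run_psql is invented for illustration.

import os
import subprocess

def run_psql(sql, dba_user=None, dba_pass=None):
    """Run one SQL statement through psql, optionally authenticating as a DBA user."""
    cmd = ['psql']
    if dba_user:
        cmd += ['-U', dba_user]
    cmd += ['-c', sql]
    if dba_pass:
        os.environ['PGPASSWORD'] = dba_pass   # psql picks the password up from the environment
    try:
        return subprocess.call(cmd)
    finally:
        if dba_pass:
            del os.environ['PGPASSWORD']      # never leave the credential behind, even on failure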
a7da562df18dc0ad22425d516ed48c93c3211f05
|
userena/contrib/umessages/urls.py
|
userena/contrib/umessages/urls.py
|
from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\+\.\w]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\.\w]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
|
from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\@\+\.\w-]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\@\.\w-]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
|
Allow @ and - in usernames also in umessages.
|
Allow @ and - in usernames also in umessages.
|
Python
|
bsd-3-clause
|
ugoertz/django-userena,ugoertz/django-userena,ugoertz/django-userena
|
from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\+\.\w]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\.\w]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
Allow @ and - in usernames also in umessages.
|
from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\@\+\.\w-]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\@\.\w-]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
|
<commit_before>from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\+\.\w]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\.\w]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
<commit_msg>Allow @ and - in usernames also in umessages.<commit_after>
|
from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\@\+\.\w-]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\@\.\w-]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
|
from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\+\.\w]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\.\w]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
Allow @ and - in usernames also in umessages.
from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\@\+\.\w-]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\@\.\w-]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
|
<commit_before>from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\+\.\w]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\.\w]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
<commit_msg>Allow @ and - in usernames also in umessages.<commit_after>from django.conf.urls import *
from userena.contrib.umessages import views as messages_views
from django.contrib.auth.decorators import login_required
urlpatterns = patterns('',
url(r'^compose/$',
messages_views.message_compose,
name='userena_umessages_compose'),
url(r'^compose/(?P<recipients>[\@\+\.\w-]+)/$',
messages_views.message_compose,
name='userena_umessages_compose_to'),
url(r'^reply/(?P<parent_id>[\d]+)/$',
messages_views.message_compose,
name='userena_umessages_reply'),
url(r'^view/(?P<username>[\@\.\w-]+)/$',
login_required(messages_views.MessageDetailListView.as_view()),
name='userena_umessages_detail'),
url(r'^remove/$',
messages_views.message_remove,
name='userena_umessages_remove'),
url(r'^unremove/$',
messages_views.message_remove,
{'undo': True},
name='userena_umessages_unremove'),
url(r'^$',
login_required(messages_views.MessageListView.as_view()),
name='userena_umessages_list'),
)
|
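The only functional change in the record above is the wider character class in the two URL patterns, which lets recipient lists and usernames contain `@` and `-`. A quick standalone check of that class with the re module; the pattern string is copied from the new urls.py, while the sample names are invented:

import re

# The old class was [\+\.\w]+ ; the commit widens it to also accept '@' and '-'.
RECIPIENT_RE = re.compile(r'^[\@\+\.\w-]+$')

for name in ['alice', 'bob-smith', 'carol@example.com', 'dave+test', 'eve!']:
    print(name, '->', bool(RECIPIENT_RE.match(name)))
# Only 'eve!' is rejected; 'bob-smith' and 'carol@example.com' now match as well.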
dcbc6c0579871ce3f9b813b0d92f3b7642c750a1
|
linter.py
|
linter.py
|
from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
|
from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact ${temp_file}'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
|
Add `${temp_file}` marker to `cmd`
|
Add `${temp_file}` marker to `cmd`
Implicit adding of the `${temp_file}` by the framework has been deprecated and SublimeLinter also logs about it.
|
Python
|
mit
|
SublimeLinter/SublimeLinter-csslint
|
from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
Add `${temp_file}` marker to `cmd`
Implicit adding of the `${temp_file}` by the framework has been deprecated and SublimeLinter also logs about it.
|
from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact ${temp_file}'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
|
<commit_before>from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
<commit_msg>Add `${temp_file}` marker to `cmd`
Implicit adding of the `${temp_file}` by the framework has been deprecated and SublimeLinter also logs about it.<commit_after>
|
from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact ${temp_file}'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
|
from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
Add `${temp_file}` marker to `cmd`
Implicit adding of the `${temp_file}` by the framework has been deprecated and SublimeLinter also logs about it.
from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact ${temp_file}'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
|
<commit_before>from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
<commit_msg>Add `${temp_file}` marker to `cmd`
Implicit adding of the `${temp_file}` by the framework has been deprecated and SublimeLinter also logs about it.<commit_after>from SublimeLinter.lint import Linter, util
class CSSLint(Linter):
cmd = 'csslint --format=compact ${temp_file}'
regex = r'''(?xi)
^.+:\s* # filename
# csslint emits errors that pertain to the code as a whole,
# in which case there is no line/col information, so that
# part is optional.
(?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?
(?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
'''
word_re = r'^([#\.]?[-\w]+)'
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'css'
defaults = {
'selector': 'source.css - meta.attribute-with-value',
'--errors=,': '',
'--warnings=,': '',
'--ignore=,': ''
}
def split_match(self, match):
"""
Extract and return values from match.
We override this method so that general errors that do not have
a line number can be placed at the beginning of the code.
"""
match, line, col, error, warning, message, near = super().split_match(match)
if line is None and message:
line = 0
col = 0
return match, line, col, error, warning, message, near
|
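For context on the record above: only the cmd string changes, while the compact-output regex stays the same. Below is a standalone check of that regex against one invented line of csslint-style output; the sample path and message are assumptions, the pattern itself is copied from the linter:

import re

LINT_RE = re.compile(r'''(?xi)
    ^.+:\s*                                                # filename
    (?:line\ (?P<line>\d+),\ col\ (?P<col>\d+),\ )?        # optional position
    (?:(?P<error>error)|(?P<warning>warning))\ -\ (?P<message>.*)
''')

sample = "/tmp/style.css: line 3, col 5, Warning - Rule is empty."
match = LINT_RE.match(sample)
print(match.group('line'), match.group('col'), match.group('message'))
# -> 3 5 Rule is empty.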
64f2720507067d10f298aa50245fa3b7b57a5bd4
|
dabuildsys/srcname.py
|
dabuildsys/srcname.py
|
#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
|
#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) == 1 and spec[0] == '*':
checkouts = []
for pkg in config.package_map:
try:
checkouts.append(checkout.PackageCheckout(pkg))
except Exception as e:
pass
return checkouts, {}
elif len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
|
Implement '*' package for all packages in Git
|
Implement '*' package for all packages in Git
|
Python
|
mit
|
mit-athena/build-system
|
#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
Implement '*' package for all packages in Git
|
#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) == 1 and spec[0] == '*':
checkouts = []
for pkg in config.package_map:
try:
checkouts.append(checkout.PackageCheckout(pkg))
except Exception as e:
pass
return checkouts, {}
elif len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
|
<commit_before>#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
<commit_msg>Implement '*' package for all packages in Git<commit_after>
|
#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) == 1 and spec[0] == '*':
checkouts = []
for pkg in config.package_map:
try:
checkouts.append(checkout.PackageCheckout(pkg))
except Exception as e:
pass
return checkouts, {}
elif len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
|
#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
Implement '*' package for all packages in Git
#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) == 1 and spec[0] == '*':
checkouts = []
for pkg in config.package_map:
try:
checkouts.append(checkout.PackageCheckout(pkg))
except Exception as e:
pass
return checkouts, {}
elif len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
|
<commit_before>#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
<commit_msg>Implement '*' package for all packages in Git<commit_after>#!/usr/bin/python
"""
Code to normalize the source package name specifier into the actual packages.
Returns the package checkouts.
"""
from common import BuildError
import apt
import config
import checkout
def expand_srcname_spec(spec):
"""Parse a list of source packages on which the operation is to be performed.
If some variant of 'all' is specified, comparison against packages currently
APT repository is made and packages which have older version in APT than in Git
are returned."""
if len(spec) == 1 and spec[0] == '*':
checkouts = []
for pkg in config.package_map:
try:
checkouts.append(checkout.PackageCheckout(pkg))
except Exception as e:
pass
return checkouts, {}
elif len(spec) > 1 or not spec[0].startswith('all'):
return [checkout.PackageCheckout(pkg) for pkg in spec], {}
else:
if spec[0] == 'all':
releases = config.releases
elif spec[0].startswith('all:'):
releases = [spec[0].split(':')[1]]
else:
raise BuildError("Invalid all-package qualifier specified")
cache = {}
packages = set()
repos = {}
for release in releases:
_, _, apt_repo = apt.get_release(release)
repos[release] = apt_repo
comparison = apt.compare_against_git(apt_repo, checkout_cache=cache)
packages |= set(checkout.lookup_by_package_name(pkg) for pkg, gitver, aptver in comparison if gitver)
return [cache[pkg] for pkg in packages], repos
|
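The '*' branch added above is a plain "try every known package, keep the ones that check out" loop. A generic, dependency-free version of that pattern follows; expand_all, demo_checkout and the sample names are placeholders, not the build system's real config.package_map or PackageCheckout:

def expand_all(package_names, build_checkout):
    checkouts = []
    for pkg in package_names:
        try:
            checkouts.append(build_checkout(pkg))
        except Exception:
            continue   # packages whose checkout fails are silently skipped, as in the commit
    return checkouts

def demo_checkout(pkg):
    if pkg == 'broken':
        raise RuntimeError('no such repository')
    return 'checkout:' + pkg

print(expand_all(['good', 'broken', 'other'], demo_checkout))
# -> ['checkout:good', 'checkout:other']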
98f26daf7c2c062d3bd72352413641e0df111871
|
src/ansible/forms.py
|
src/ansible/forms.py
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
|
from django import forms
from django.core.validators import ValidationError
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
import utils.playbook as playbook_utils
import os
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
def __init__(self, *args, **kwargs):
self.pk = kwargs.pop('pk', None)
super(PlaybookFileForm, self).__init__(*args, **kwargs)
def clean_filename(self):
data = playbook_utils.append_extension(self.cleaned_data['filename'])
playbook = Playbook.query_set.get(pk=self.pk)
playbook_dir = playbook.directory
playbook_file_path = os.path.join(playbook_dir, data)
if os.path.exists(playbook_file_path):
raise forms.ValidationError("Filename already used")
return data
|
Use clean_filename to validate if filename is already used
|
Use clean_filename to validate if filename is already used
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
Use clean_filename to validate if filename is already used
|
from django import forms
from django.core.validators import ValidationError
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
import utils.playbook as playbook_utils
import os
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
def __init__(self, *args, **kwargs):
self.pk = kwargs.pop('pk', None)
super(PlaybookFileForm, self).__init__(*args, **kwargs)
def clean_filename(self):
data = playbook_utils.append_extension(self.cleaned_data['filename'])
playbook = Playbook.query_set.get(pk=self.pk)
playbook_dir = playbook.directory
playbook_file_path = os.path.join(playbook_dir, data)
if os.path.exists(playbook_file_path):
raise forms.ValidationError("Filename already used")
return data
|
<commit_before>from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
<commit_msg>Use clean_filename to validate if filename is already used<commit_after>
|
from django import forms
from django.core.validators import ValidationError
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
import utils.playbook as playbook_utils
import os
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
def __init__(self, *args, **kwargs):
self.pk = kwargs.pop('pk', None)
super(PlaybookFileForm, self).__init__(*args, **kwargs)
def clean_filename(self):
data = playbook_utils.append_extension(self.cleaned_data['filename'])
playbook = Playbook.query_set.get(pk=self.pk)
playbook_dir = playbook.directory
playbook_file_path = os.path.join(playbook_dir, data)
if os.path.exists(playbook_file_path):
raise forms.ValidationError("Filename already used")
return data
|
from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
Use clean_filename to validate if filename is already used
from django import forms
from django.core.validators import ValidationError
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
import utils.playbook as playbook_utils
import os
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
def __init__(self, *args, **kwargs):
self.pk = kwargs.pop('pk', None)
super(PlaybookFileForm, self).__init__(*args, **kwargs)
def clean_filename(self):
data = playbook_utils.append_extension(self.cleaned_data['filename'])
playbook = Playbook.query_set.get(pk=self.pk)
playbook_dir = playbook.directory
playbook_file_path = os.path.join(playbook_dir, data)
if os.path.exists(playbook_file_path):
raise forms.ValidationError("Filename already used")
return data
|
<commit_before>from django import forms
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
<commit_msg>Use clean_filename to validate if filename is already used<commit_after>from django import forms
from django.core.validators import ValidationError
from django.conf import settings
from django.forms import ModelForm
from ansible.models import Playbook
import utils.playbook as playbook_utils
import os
class AnsibleForm1(ModelForm):
class Meta:
model = Playbook
fields = ['repository', 'username']
class AnsibleForm2(ModelForm):
class Meta:
model = Playbook
fields = ['inventory', 'user']
class LoginForm(forms.Form):
username = forms.CharField(label='Username', max_length=100)
password = forms.CharField(label='Password', max_length=100)
class PlaybookFileForm(forms.Form):
filename = forms.CharField(label='Filename', max_length=100)
playbook = forms.CharField(widget=forms.Textarea(attrs={'rows':30,'cols':80}))
def __init__(self, *args, **kwargs):
self.pk = kwargs.pop('pk', None)
super(PlaybookFileForm, self).__init__(*args, **kwargs)
def clean_filename(self):
data = playbook_utils.append_extension(self.cleaned_data['filename'])
playbook = Playbook.query_set.get(pk=self.pk)
playbook_dir = playbook.directory
playbook_file_path = os.path.join(playbook_dir, data)
if os.path.exists(playbook_file_path):
raise forms.ValidationError("Filename already used")
return data
|
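The clean_filename hook added above reduces to "normalise the name, then refuse it if that file already exists in the playbook's directory"; Django runs such clean_<field> hooks automatically during form validation. A dependency-free sketch of the same check, where append_extension is a stand-in whose behaviour is assumed (the real utils.playbook.append_extension is not shown in the record):

import os

def append_extension(name, ext='.yml'):
    # Assumed behaviour: add the extension only when it is missing.
    return name if name.endswith(ext) else name + ext

def validate_new_filename(name, playbook_dir):
    candidate = append_extension(name)
    if os.path.exists(os.path.join(playbook_dir, candidate)):
        raise ValueError("Filename already used")
    return candidate

print(validate_new_filename('site', '/tmp'))   # -> 'site.yml' unless /tmp/site.yml already exists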
9fed4624f457f7643ff2aa83921409cb7e580039
|
moviealert/forms.py
|
moviealert/forms.py
|
from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget,
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
|
from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget(attrs={"readonly": "readonly",
"style": "background:white;"}),
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
|
Disable manual input of date.
|
Disable manual input of date.
Made the input field read-only to prevent input of date manually.
|
Python
|
mit
|
iAmMrinal0/django_moviealert,iAmMrinal0/django_moviealert,iAmMrinal0/django_moviealert
|
from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget,
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
Disable manual input of date.
Made the input field read-only to prevent input of date manually.
|
from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget(attrs={"readonly": "readonly",
"style": "background:white;"}),
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
|
<commit_before>from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget,
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
<commit_msg>Disable manual input of date.
Made the input field read-only to prevent input of date manually.<commit_after>
|
from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget(attrs={"readonly": "readonly",
"style": "background:white;"}),
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
|
from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget,
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
Disable manual input of date.
Made the input field read-only to prevent input of date manually.
from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget(attrs={"readonly": "readonly",
"style": "background:white;"}),
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
|
<commit_before>from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget,
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
<commit_msg>Disable manual input of date.
Made the input field read-only to prevent input of date manually.<commit_after>from django import forms
from django.conf import settings
from moviealert.base.widgets import CalendarWidget
from .models import TaskList, RegionData
class MovieForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(MovieForm, self).__init__(*args, **kwargs)
self.fields['movie_date'] = forms.DateField(
widget=CalendarWidget(attrs={"readonly": "readonly",
"style": "background:white;"}),
input_formats=settings.ALLOWED_DATE_FORMAT)
self.fields["city"] = forms.CharField(
widget=forms.TextInput(attrs={"id": "txtSearch"}))
self.fields["city"].label = "City Name"
def clean(self):
cleaned_data = super(MovieForm, self).clean()
cleaned_data['city'] = RegionData.objects.get(
bms_city=cleaned_data['city'])
class Meta:
model = TaskList
exclude = ("username", "task_completed", "notified", "movie_found",)
|
ddb3665a1450e8a1eeee57bbe4b5c0eb7f3f05b1
|
molly/utils/management/commands/generate_cache_manifest.py
|
molly/utils/management/commands/generate_cache_manifest.py
|
import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
dirs.remove('admin')
dirs.remove('desktop')
dirs.remove('markers')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)
|
import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
if 'admin' in dirs: dirs.remove('admin')
if 'desktop' in dirs: dirs.remove('desktop')
if 'markers' in dirs: dirs.remove('markers')
if root == os.path.join(setting.STATIC_ROOT, 'touchmaplite', 'images'):
# Don't cache touchmaplite markers, we don't use them
if 'markers' in dirs: dirs.remove('markers')
if 'iui' in dirs: dirs.remove('iui')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
# Don't cache ourselves!
print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)
|
Fix cache.manifest generation when desktop app isn't loaded, also don't include unnecessary touchmaplite files (MOLLY-113)
|
Fix cache.manifest generation when desktop app isn't loaded, also don't include unnecessary touchmaplite files (MOLLY-113)
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
dirs.remove('admin')
dirs.remove('desktop')
dirs.remove('markers')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
                    print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)
Fix cache.manifest generation when desktop app isn't loaded, also don't include unnecessary touchmaplite files (MOLLY-113)
|
import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
if 'admin' in dirs: dirs.remove('admin')
if 'desktop' in dirs: dirs.remove('desktop')
if 'markers' in dirs: dirs.remove('markers')
if root == os.path.join(setting.STATIC_ROOT, 'touchmaplite', 'images'):
# Don't cache touchmaplite markers, we don't use them
if 'markers' in dirs: dirs.remove('markers')
if 'iui' in dirs: dirs.remove('iui')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
# Don't cache ourselves!
print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)
|
<commit_before>import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
dirs.remove('admin')
dirs.remove('desktop')
dirs.remove('markers')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)<commit_msg>Fix cache.manifest generation when desktop app isn't loaded, also don't include unnecessary touchmaplite files (MOLLY-113)<commit_after>
|
import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
if 'admin' in dirs: dirs.remove('admin')
if 'desktop' in dirs: dirs.remove('desktop')
if 'markers' in dirs: dirs.remove('markers')
if root == os.path.join(setting.STATIC_ROOT, 'touchmaplite', 'images'):
# Don't cache touchmaplite markers, we don't use them
if 'markers' in dirs: dirs.remove('markers')
if 'iui' in dirs: dirs.remove('iui')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
# Don't cache ourselves!
print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)
|
import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
dirs.remove('admin')
dirs.remove('desktop')
dirs.remove('markers')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
                    print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)
Fix cache.manifest generation when desktop app isn't loaded, also don't include unnecessary touchmaplite files (MOLLY-113)
import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
if 'admin' in dirs: dirs.remove('admin')
if 'desktop' in dirs: dirs.remove('desktop')
if 'markers' in dirs: dirs.remove('markers')
if root == os.path.join(setting.STATIC_ROOT, 'touchmaplite', 'images'):
# Don't cache touchmaplite markers, we don't use them
if 'markers' in dirs: dirs.remove('markers')
if 'iui' in dirs: dirs.remove('iui')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
# Don't cache ourselves!
print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)
|
<commit_before>import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
dirs.remove('admin')
dirs.remove('desktop')
dirs.remove('markers')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)<commit_msg>Fix cache.manifest generation when desktop app isn't loaded, also don't include unnecessary touchmaplite files (MOLLY-113)<commit_after>import os
import os.path
from django.core.management.base import NoArgsCommand
from django.conf import settings
class Command(NoArgsCommand):
can_import_settings = True
def handle_noargs(self, **options):
cache_manifest_path = os.path.join(settings.STATIC_ROOT,
'cache.manifest')
static_prefix_length = len(settings.STATIC_ROOT.split(os.sep))
with open(cache_manifest_path, 'w') as cache_manifest:
print >>cache_manifest, "CACHE MANIFEST"
print >>cache_manifest, "CACHE:"
for root, dirs, files in os.walk(settings.STATIC_ROOT):
if root == settings.STATIC_ROOT:
# Don't cache admin media, desktop or markers
if 'admin' in dirs: dirs.remove('admin')
if 'desktop' in dirs: dirs.remove('desktop')
if 'markers' in dirs: dirs.remove('markers')
if root == os.path.join(setting.STATIC_ROOT, 'touchmaplite', 'images'):
# Don't cache touchmaplite markers, we don't use them
if 'markers' in dirs: dirs.remove('markers')
if 'iui' in dirs: dirs.remove('iui')
url = '/'.join(root.split(os.sep)[static_prefix_length:])
for file in files:
# Don't cache uncompressed JS/CSS
_, ext = os.path.splitext(file)
if ext in ('.js','.css') and 'c' != url.split('/')[0]:
continue
# Don't cache ourselves!
print >>cache_manifest, "%s%s/%s" % (settings.STATIC_URL, url, file)
|
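Two details in this record are worth noting. First, os.walk() honours in-place edits to the dirs list, and guarding each removal with an `in` test is what lets the command run when an optional directory such as desktop/ was never collected; a bare dirs.remove('desktop') would raise ValueError. Second, the recorded new version compares root against setting.STATIC_ROOT (singular) in the touchmaplite branch; that name is not defined in the module, so evaluating the condition would raise a NameError, and settings.STATIC_ROOT was presumably intended. A self-contained sketch of the guarded-pruning pattern, using a throwaway temporary tree rather than a real STATIC_ROOT:

import os
import tempfile

# Build a small disposable tree so the walk below has something to prune.
root = tempfile.mkdtemp()
for name in ("admin", "css", "js"):
    os.makedirs(os.path.join(root, name))

SKIP = ("admin", "desktop", "markers")  # 'desktop' and 'markers' do not exist here

for current, dirs, files in os.walk(root):
    if current == root:
        for name in SKIP:
            if name in dirs:       # the guard: removing a missing entry would raise ValueError
                dirs.remove(name)  # in-place edit, so os.walk never descends into it
    print(current, sorted(dirs))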
e50682cfd285c5de42118245ba8a30f559ef1f20
|
rst2pdf/utils.py
|
rst2pdf/utils.py
|
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
|
# -*- coding: utf-8 -*-
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
|
Fix encoding (thanks to Yasushi Masuda)
|
Fix encoding (thanks to Yasushi Masuda)
|
Python
|
mit
|
thomaspurchas/rst2pdf,thomaspurchas/rst2pdf
|
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
Fix encoding (thanks to Yasushi Masuda)
|
# -*- coding: utf-8 -*-
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
|
<commit_before>#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
<commit_msg>Fix encoding (thanks to Yasushi Masuda)<commit_after>
|
# -*- coding: utf-8 -*-
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
|
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
Fix encoding (thanks to Yasushi Masuda)
# -*- coding: utf-8 -*-
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
|
<commit_before>#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
<commit_msg>Fix encoding (thanks to Yasushi Masuda)<commit_after># -*- coding: utf-8 -*-
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
import sys
from reportlab.platypus import PageBreak, Spacer
from flowables import *
import shlex
from log import log
def parseRaw (data):
'''Parse and process a simple DSL to handle creation of flowables.
Supported (can add others on request):
* PageBreak
* Spacer width, height
'''
elements=[]
lines=data.splitlines()
for line in lines:
lexer=shlex.shlex(line)
lexer.whitespace+=','
tokens=list(lexer)
command=tokens[0]
if command == 'PageBreak':
if len(tokens)==1:
elements.append(MyPageBreak())
else:
elements.append(MyPageBreak(tokens[1]))
if command == 'Spacer':
elements.append(Spacer(int(tokens[1]),int(tokens[2])))
if command == 'Transition':
elements.append(Transition(*tokens[1:]))
return elements
# Looks like this is not used anywhere now
#def depth (node):
# if node.parent==None:
# return 0
# else:
# return 1+depth(node.parent)
|
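The single added line is a PEP 263 encoding declaration. Under Python 2, which this code targets (note the print >> syntax), a source file containing non-ASCII bytes is rejected with a SyntaxError unless such a cookie declares its encoding; Python 3 assumes UTF-8 by default. A tiny standalone illustration, not taken from rst2pdf:

# -*- coding: utf-8 -*-
# Without the cookie above, Python 2 refuses to compile the non-ASCII literal
# below ("SyntaxError: Non-ASCII character ..."); Python 3 accepts it either way.
credit = u"ありがとう, Yasushi Masuda"
print(credit)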
5111a3ec4822598d5c2bf009e26bb0eec49b7743
|
sample_config.py
|
sample_config.py
|
# Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
|
# Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
# The port on which Narcissa will answer web SQL queries
SERVER_PORT = 20410
|
Add server port to sample config
|
Add server port to sample config
|
Python
|
mit
|
mplewis/narcissa
|
# Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
Add server port to sample config
|
# Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
# The port on which Narcissa will answer web SQL queries
SERVER_PORT = 20410
|
<commit_before># Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
<commit_msg>Add server port to sample config<commit_after>
|
# Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
# The port on which Narcissa will answer web SQL queries
SERVER_PORT = 20410
|
# Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
Add server port to sample config
# Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
# The port on which Narcissa will answer web SQL queries
SERVER_PORT = 20410
|
<commit_before># Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
<commit_msg>Add server port to sample config<commit_after># Database connection config
# The URI most scrapers will use to access the DB
DB_URI = 'sqlite:///db/data.sqlite'
# The read-only URI the server will use to access the DB
DB_URI_READ_ONLY = 'file:db/data.sqlite?mode=ro'
# Server performance config
# Max time an SQL query can take before it's killed
QUERY_TIMEOUT_SECS = 2
# How long query results are cached before they're stale
QUERY_CACHE_EXPIRY_SECS = 30
# External services config
# The server's web-visible hostname, NOT including a trailing slash or port
# If you're running this locally, use 'http://localhost'
SERVER_HOST = 'http://localhost'
# The port on which Narcissa will answer web SQL queries
SERVER_PORT = 20410
|
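Because the sample config is a plain Python module, the new constant is read downstream as an ordinary attribute. A hedged sketch of how the host and port are typically combined; it assumes the file has been copied onto the import path as sample_config.py, and none of it is taken from the Narcissa server itself:

import importlib

cfg = importlib.import_module("sample_config")  # the config module shown in this record
base_url = "{0}:{1}".format(cfg.SERVER_HOST, cfg.SERVER_PORT)
print(base_url)  # e.g. http://localhost:20410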
ba3c7e6e2c7fff7ed0c2b51a129b9d7c85eefc6f
|
helios/__init__.py
|
helios/__init__.py
|
from django.conf import settings
from django.core.urlresolvers import reverse
from helios.views import election_shortcut
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
|
from django.conf import settings
from django.core.urlresolvers import reverse
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
|
Remove unused import causing deprecation warning
|
Remove unused import causing deprecation warning
Warning in the form:
RemovedInDjango19Warning: "ModelXYZ" doesn't declare an explicit app_label
Apparently this happens because it tries to import models before app
configuration runs
|
Python
|
apache-2.0
|
shirlei/helios-server,benadida/helios-server,shirlei/helios-server,benadida/helios-server,shirlei/helios-server,benadida/helios-server,benadida/helios-server,benadida/helios-server,shirlei/helios-server,shirlei/helios-server
|
from django.conf import settings
from django.core.urlresolvers import reverse
from helios.views import election_shortcut
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
Remove unused import causing deprecation warning
Warning in the form:
RemovedInDjango19Warning: "ModelXYZ" doesn't declare an explicit app_label
Apparently this happens because it tries to import models before app
configuration runs
|
from django.conf import settings
from django.core.urlresolvers import reverse
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
|
<commit_before>
from django.conf import settings
from django.core.urlresolvers import reverse
from helios.views import election_shortcut
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
<commit_msg>Remove unused import causing deprecation warning
Warning in the form:
RemovedInDjango19Warning: "ModelXYZ" doesn't declare an explicit app_label
Apparently this happens because it tries to import models before app
configuration runs<commit_after>
|
from django.conf import settings
from django.core.urlresolvers import reverse
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
|
from django.conf import settings
from django.core.urlresolvers import reverse
from helios.views import election_shortcut
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
Remove unused import causing deprecation warning
Warning in the form:
RemovedInDjango19Warning: "ModelXYZ" doesn't declare an explicit app_label
Apparently this happens because it tries to import models before app
configuration runs
from django.conf import settings
from django.core.urlresolvers import reverse
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
|
<commit_before>
from django.conf import settings
from django.core.urlresolvers import reverse
from helios.views import election_shortcut
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
<commit_msg>Remove unused import causing deprecation warning
Warning in the form:
RemovedInDjango19Warning: "ModelXYZ" doesn't declare an explicit app_label
Apparently this happens because it tries to import models before app
configuration runs<commit_after>from django.conf import settings
from django.core.urlresolvers import reverse
TEMPLATE_BASE = settings.HELIOS_TEMPLATE_BASE or "helios/templates/base.html"
# a setting to ensure that only admins can create an election
ADMIN_ONLY = settings.HELIOS_ADMIN_ONLY
# allow upload of voters via CSV?
VOTERS_UPLOAD = settings.HELIOS_VOTERS_UPLOAD
# allow emailing of voters?
VOTERS_EMAIL = settings.HELIOS_VOTERS_EMAIL
|
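The warning goes away here because the unused module-level import was pulling in Django models before the app registry was populated. When such an import is actually needed, the usual remedy is to defer it until first use rather than delete it. A stdlib-only sketch of that deferral pattern (no Django involved; the module name is illustrative):

import importlib

def lazy_import(module_name):
    # Return a zero-argument loader; the module is imported on the first call only.
    cache = {}
    def load():
        if "module" not in cache:
            cache["module"] = importlib.import_module(module_name)
        return cache["module"]
    return load

get_json = lazy_import("json")             # nothing is imported at definition time
print(get_json().dumps({"ready": True}))   # the import happens here, on first use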
2a1f1ca653fcd0a8fbaa465ba664da0a1ede6306
|
simuvex/s_run.py
|
simuvex/s_run.py
|
#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun:
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
|
#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun(object):
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
|
Make SimRun a new-style Python class.
|
Make SimRun a new-style Python class.
|
Python
|
bsd-2-clause
|
chubbymaggie/simuvex,iamahuman/angr,chubbymaggie/angr,chubbymaggie/simuvex,angr/angr,zhuyue1314/simuvex,schieb/angr,iamahuman/angr,angr/angr,tyb0807/angr,f-prettyland/angr,axt/angr,tyb0807/angr,f-prettyland/angr,chubbymaggie/angr,angr/angr,schieb/angr,iamahuman/angr,schieb/angr,tyb0807/angr,angr/simuvex,axt/angr,chubbymaggie/simuvex,chubbymaggie/angr,axt/angr,f-prettyland/angr
|
#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun:
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
Make SimRun a new-style Python class.
|
#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun(object):
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
|
<commit_before>#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun:
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
<commit_msg>Make SimRun a new-style Python class.<commit_after>
|
#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun(object):
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
|
#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun:
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
Make SimRun a new-style Python class.
#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun(object):
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
|
<commit_before>#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun:
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
<commit_msg>Make SimRun a new-style Python class.<commit_after>#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun(object):
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
|
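The only change is inheriting from object, which under Python 2 turns SimRun from an old-style into a new-style class; that is what makes property(), super(), descriptors and type() behave consistently. Under Python 3 every class is new-style, so the two spellings are equivalent there. A minimal illustration (not simuvex code):

class Old:             # old-style on Python 2, implicitly new-style on Python 3
    pass

class New(object):     # new-style everywhere
    @property
    def answer(self):  # property() is only reliable on new-style classes under Python 2
        return 42

print(type(New()).__name__, New().answer)  # -> New 42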
146e6caf7d47e7ea0bedf057ec9c129818942c07
|
mixmind/__init__.py
|
mixmind/__init__.py
|
# mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
with open('local_secret') as fp: # TODO config management
app.config['SECRET_KEY'] = fp.read().strip()
# flask-security
app.config['SECURITY_PASSWORD_SALT'] = 'salty'
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
|
# mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
|
Remove other secret lookups from init
|
Remove other secret lookups from init
|
Python
|
apache-2.0
|
twschum/mix-mind,twschum/mix-mind,twschum/mix-mind,twschum/mix-mind
|
# mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
with open('local_secret') as fp: # TODO config management
app.config['SECRET_KEY'] = fp.read().strip()
# flask-security
app.config['SECURITY_PASSWORD_SALT'] = 'salty'
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
Remove other secret lookups from init
|
# mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
|
<commit_before># mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
with open('local_secret') as fp: # TODO config management
app.config['SECRET_KEY'] = fp.read().strip()
# flask-security
app.config['SECURITY_PASSWORD_SALT'] = 'salty'
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
<commit_msg>Remove other secret lookups from init<commit_after>
|
# mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
|
# mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
with open('local_secret') as fp: # TODO config management
app.config['SECRET_KEY'] = fp.read().strip()
# flask-security
app.config['SECURITY_PASSWORD_SALT'] = 'salty'
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
Remove other secret lookups from init
# mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
|
<commit_before># mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
with open('local_secret') as fp: # TODO config management
app.config['SECRET_KEY'] = fp.read().strip()
# flask-security
app.config['SECURITY_PASSWORD_SALT'] = 'salty'
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
<commit_msg>Remove other secret lookups from init<commit_after># mixmind/__init__.py
import logging
log = logging.getLogger(__name__)
from flask import Flask
from flask_uploads import UploadSet, DATA, configure_uploads
app = Flask(__name__, instance_relative_config=True)
app.config.from_object('config')
app.config.from_pyfile('config.py')
# flask-uploads
app.config['UPLOADS_DEFAULT_DEST'] = './stockdb'
datafiles = UploadSet('datafiles', DATA)
configure_uploads(app, (datafiles,))
from mixmind.database import db, init_db
db.init_app(app)
with app.app_context():
init_db()
import mixmind.views # to assosciate views with app
|
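With the inline secret loading gone, SECRET_KEY and the password salt have to come from configuration that is not checked in, typically the instance config file already loaded by from_pyfile or the process environment. A hedged sketch of that arrangement; Flask is assumed to be installed, and the file and variable names are illustrative rather than mix-mind's actual ones:

import os
from flask import Flask

app = Flask(__name__, instance_relative_config=True)
# instance/config.py stays out of version control and can define SECRET_KEY,
# SECURITY_PASSWORD_SALT, database URLs, and so on.
app.config.from_pyfile("config.py", silent=True)
# Fall back to the environment so nothing secret is hard-coded in the package.
app.config.setdefault("SECRET_KEY", os.environ.get("SECRET_KEY", "dev-only-not-secret"))
print("secret configured:", bool(app.config["SECRET_KEY"]))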
0696e73342e994093b887c731eedc20a6d7a82ac
|
concurrency/test_get_websites.py
|
concurrency/test_get_websites.py
|
import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
Fix some typos in concurrency test
|
Fix some typos in concurrency test
|
Python
|
mit
|
b-ritter/python-notes,b-ritter/python-notes
|
import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
    unittest.main()
Fix some typos in concurrency test
|
import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()<commit_msg>Fix some typos in concurrency test<commit_after>
|
import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
    unittest.main()
Fix some typos in concurrency test
import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()<commit_msg>Fix some typos in concurrency test<commit_after>import unittest
from requests import Request
from unittest.mock import patch, MagicMock
from concurrency.get_websites import load_url as load_url
class MockResponse():
def __init__(self):
self.text = "foo"
self.status_code = 200
class TestGetWebsites(unittest.TestCase):
@patch('concurrency.get_websites.requests')
def test_load_url_returns_data(self, m):
""" Check that we're getting the data from a request object """
m.get = MagicMock(return_value=MockResponse())
data = load_url('fazzbear')
self.assertEqual(data, 'foo')
@patch('concurrency.get_websites.requests')
def test_load_called_with_correct_url(self, m):
""" Check that we're making the request with the url we pass """
m.get = MagicMock(return_value=MockResponse())
load_url('fakeurl')
m.get.assert_called_with('fakeurl')
if __name__ == "__main__":
unittest.main()
|
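The record above patches requests where the module under test looks it up (concurrency.get_websites.requests) rather than where it is defined, which is the detail that makes the mock take effect. A minimal self-contained sketch of the same pattern, assuming a stand-in load_url defined in the test module itself (the URL is a placeholder, not from the record):

import unittest
from unittest.mock import patch, MagicMock
import requests  # patched below, so no network access actually happens

def load_url(url):
    # stand-in for concurrency.get_websites.load_url
    return requests.get(url).text

class TestLoadUrl(unittest.TestCase):
    @patch(f"{__name__}.requests")  # patch the name this module actually uses
    def test_returns_body_and_uses_url(self, mocked_requests):
        mocked_requests.get = MagicMock(return_value=MagicMock(text="foo", status_code=200))
        self.assertEqual(load_url("http://example.invalid"), "foo")
        mocked_requests.get.assert_called_with("http://example.invalid")

if __name__ == "__main__":
    unittest.main()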
899882be398f8a31e706a590c0a7e297c1589c25
|
threat_intel/util/error_messages.py
|
threat_intel/util/error_messages.py
|
# -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
# -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
Fix deprecation warning interfering with tests
|
Fix deprecation warning interfering with tests
|
Python
|
mit
|
Yelp/threat_intel,megancarney/threat_intel,SYNchroACK/threat_intel
|
# -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
Fix deprecation warning interfering with tests
|
# -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
<commit_before># -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
<commit_msg>Fix deprecation warning interfering with tests<commit_after>
|
# -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
# -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
Fix deprecation warning interfering with tests# -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
<commit_before># -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, e.message if e.message else ''))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
<commit_msg>Fix deprecation warning interfering with tests<commit_after># -*- coding: utf-8 -*-
#
# A set of simple methods for writing messages to stderr
#
import sys
from traceback import extract_tb
from traceback import format_list
def write_exception(e):
exc_type, __, exc_traceback = sys.exc_info()
sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(e)))
for line in format_list(extract_tb(exc_traceback)):
sys.stderr.write(line)
def write_error_message(message):
sys.stderr.write('[ERROR] ')
sys.stderr.write(message)
sys.stderr.write('\n')
|
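The change above is needed because BaseException.message was deprecated in Python 2.6 and removed in Python 3, while str(e) works on both. A small sketch of the same stderr-reporting pattern, runnable on Python 3; the ValueError is manufactured purely for the demonstration:

import sys
from traceback import extract_tb, format_list

def report(exc):
    exc_type, _, exc_tb = sys.exc_info()
    # str(exc) is portable; exc.message no longer exists on Python 3 exceptions
    sys.stderr.write('[ERROR] {0} {1}\n'.format(exc_type.__name__, str(exc)))
    for line in format_list(extract_tb(exc_tb)):
        sys.stderr.write(line)

try:
    int('not a number')
except ValueError as err:
    report(err)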
8e41709078885313b12f3b6e619573851a21be19
|
scripts/check_env.py
|
scripts/check_env.py
|
#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
required_executables = ['git', 'dexy', 'ipython', 'nosetests']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'])
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn error was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
|
#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
print('')
required_executables = ['git', 'dexy', 'ipython', 'nosetests', 'ffmpeg']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'],
stderr=subprocess.STDOUT)
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn defect was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
|
Add ffmpeg to the required environment.
|
Add ffmpeg to the required environment.
|
Python
|
apache-2.0
|
5x5x5x5/ReproTutorial,5x5x5x5/ReproTutorial,reproducible-research/scipy-tutorial-2014,reproducible-research/scipy-tutorial-2014,5x5x5x5/ReproTutorial,reproducible-research/scipy-tutorial-2014
|
#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
required_executables = ['git', 'dexy', 'ipython', 'nosetests']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'])
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn error was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
Add ffmpeg to the required environment.
|
#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
print('')
required_executables = ['git', 'dexy', 'ipython', 'nosetests', 'ffmpeg']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'],
stderr=subprocess.STDOUT)
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn defect was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
|
<commit_before>#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
required_executables = ['git', 'dexy', 'ipython', 'nosetests']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'])
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn error was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
<commit_msg>Add ffmpeg to the required environment.<commit_after>
|
#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
print('')
required_executables = ['git', 'dexy', 'ipython', 'nosetests', 'ffmpeg']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'],
stderr=subprocess.STDOUT)
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn defect was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
|
#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
required_executables = ['git', 'dexy', 'ipython', 'nosetests']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'])
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn error was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
Add ffmpeg to the required environment.#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
print('')
required_executables = ['git', 'dexy', 'ipython', 'nosetests', 'ffmpeg']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'],
stderr=subprocess.STDOUT)
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn defect was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
|
<commit_before>#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
required_executables = ['git', 'dexy', 'ipython', 'nosetests']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'])
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn error was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
<commit_msg>Add ffmpeg to the required environment.<commit_after>#!/usr/bin/env python
"""Computational environment check script for 2014 SciPy Conference Tutorial:
Reproducible Research: Walking the Walk.
https://github.com/reproducible-research/scipy-tutorial-2014
"""
import sys
import subprocess
return_value = 0
required_packages = ['numpy', 'scipy', 'matplotlib', 'SimpleITK']
for package in required_packages:
print('Importing ' + package + ' ...')
try:
__import__(package, globals(), locals(), [], 0)
except ImportError:
print('Error: could not import ' + package)
return_value += 1
print('')
required_executables = ['git', 'dexy', 'ipython', 'nosetests', 'ffmpeg']
for executable in required_executables:
print('Executing ' + executable + ' ...')
try:
output = subprocess.check_output([executable, '--help'],
stderr=subprocess.STDOUT)
except OSError:
print('Error: could not execute ' + executable)
return_value += 1
if return_value is 0:
print('\nSuccess.')
else:
print('\nAn defect was found in your environment, please see the messages ' +
'above.')
sys.exit(return_value)
|
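Passing stderr=subprocess.STDOUT, as the new version does, folds each probed tool's stderr chatter into the captured output instead of letting it spill onto the console. A hedged sketch of the same probe, with shutil.which (Python 3.3+) added as a cheaper existence check; the executable names are only examples:

import shutil
import subprocess

def have_executable(name):
    if shutil.which(name) is None:
        return False  # not on PATH at all
    try:
        # capture stdout and stderr together so the probe stays quiet
        subprocess.check_output([name, '--help'], stderr=subprocess.STDOUT)
    except (OSError, subprocess.CalledProcessError):
        return False
    return True

for exe in ('git', 'ffmpeg'):
    print(exe, 'found' if have_executable(exe) else 'missing')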
62c87379d0f4fa7cf6fc9619426fef484c918a27
|
fp/fp/urls.py
|
fp/fp/urls.py
|
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
|
Add django media file serving in debug mode
|
Add django media file serving in debug mode
|
Python
|
mit
|
j7nn7k/www.flashpacker.io
|
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
Add django media file serving in debug mode
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Add django media file serving in debug mode<commit_after>
|
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
|
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
Add django media file serving in debug modefrom __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
|
<commit_before>from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Add django media file serving in debug mode<commit_after>from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'fp.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
|
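django.conf.urls.static.static already returns an empty list when settings.DEBUG is false, so the if settings.DEBUG guard is defensive rather than strictly required. For newer Django releases (2.0+, where patterns() is gone and path() is available) the equivalent wiring looks roughly like the sketch below; this is an illustrative adaptation, not the project's actual urls.py:

from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path

urlpatterns = [
    path('admin/', admin.site.urls),
]

if settings.DEBUG:
    # serve user-uploaded media from MEDIA_ROOT during development only
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)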
062c39492a9a7965c94930479f721b321bff051b
|
tbmodels/__init__.py
|
tbmodels/__init__.py
|
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
|
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.2a1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
|
Change version number to 1.3.2a1
|
Change version number to 1.3.2a1
|
Python
|
apache-2.0
|
Z2PackDev/TBmodels,Z2PackDev/TBmodels
|
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
Change version number to 1.3.2a1
|
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.2a1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
|
<commit_before># -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
<commit_msg>Change version number to 1.3.2a1<commit_after>
|
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.2a1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
|
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
Change version number to 1.3.2a1# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.2a1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
|
<commit_before># -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
<commit_msg>Change version number to 1.3.2a1<commit_after># -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
r"""
TBmodels is a tool for creating / loading and manipulating tight-binding models.
"""
__version__ = '1.3.2a1'
# import order is important due to circular imports
from . import helpers
from ._tb_model import Model
from . import _kdotp
from . import io
|
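1.3.2a1 is a PEP 440 pre-release identifier (first alpha of 1.3.2), so it sorts after the released 1.3.1 but before a final 1.3.2. The ordering can be verified with the packaging library (a separate pip install, not a TBmodels dependency):

from packaging.version import Version

assert Version('1.3.1') < Version('1.3.2a1') < Version('1.3.2')
print(Version('1.3.2a1').is_prerelease)  # True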
b7514ff97118f3bd0a22d620659d307226e0d1fd
|
apps/domain/src/main/core/node.py
|
apps/domain/src/main/core/node.py
|
from syft.core.node.domain.domain import Domain
node = Domain(name="om-domain")
|
from syft.grid.grid_client import connect
from syft.core.node.domain.domain import Domain
from syft.core.node.device.client import DeviceClient
from syft.grid.connections.http_connection import HTTPConnection
from syft.grid.services.worker_management_service import CreateWorkerService
node = Domain(name="om-domain")
node.immediate_services_with_reply.append(CreateWorkerService)
node._register_services() # re-register all services including SignalingService
try:
node.private_device = connect(
url="http://localhost:5000", # Domain Address
conn_type=HTTPConnection, # HTTP Connection Protocol
client_type=DeviceClient,
) # Device Client type
node.in_memory_client_registry[node.private_device.device_id] = node.private_device
except:
pass
|
ADD a new worker connection
|
ADD a new worker connection
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
from syft.core.node.domain.domain import Domain
node = Domain(name="om-domain")
ADD a new worker connection
|
from syft.grid.grid_client import connect
from syft.core.node.domain.domain import Domain
from syft.core.node.device.client import DeviceClient
from syft.grid.connections.http_connection import HTTPConnection
from syft.grid.services.worker_management_service import CreateWorkerService
node = Domain(name="om-domain")
node.immediate_services_with_reply.append(CreateWorkerService)
node._register_services() # re-register all services including SignalingService
try:
node.private_device = connect(
url="http://localhost:5000", # Domain Address
conn_type=HTTPConnection, # HTTP Connection Protocol
client_type=DeviceClient,
) # Device Client type
node.in_memory_client_registry[node.private_device.device_id] = node.private_device
except:
pass
|
<commit_before>from syft.core.node.domain.domain import Domain
node = Domain(name="om-domain")
<commit_msg>ADD a new worker connection<commit_after>
|
from syft.grid.grid_client import connect
from syft.core.node.domain.domain import Domain
from syft.core.node.device.client import DeviceClient
from syft.grid.connections.http_connection import HTTPConnection
from syft.grid.services.worker_management_service import CreateWorkerService
node = Domain(name="om-domain")
node.immediate_services_with_reply.append(CreateWorkerService)
node._register_services() # re-register all services including SignalingService
try:
node.private_device = connect(
url="http://localhost:5000", # Domain Address
conn_type=HTTPConnection, # HTTP Connection Protocol
client_type=DeviceClient,
) # Device Client type
node.in_memory_client_registry[node.private_device.device_id] = node.private_device
except:
pass
|
from syft.core.node.domain.domain import Domain
node = Domain(name="om-domain")
ADD a new worker connectionfrom syft.grid.grid_client import connect
from syft.core.node.domain.domain import Domain
from syft.core.node.device.client import DeviceClient
from syft.grid.connections.http_connection import HTTPConnection
from syft.grid.services.worker_management_service import CreateWorkerService
node = Domain(name="om-domain")
node.immediate_services_with_reply.append(CreateWorkerService)
node._register_services() # re-register all services including SignalingService
try:
node.private_device = connect(
url="http://localhost:5000", # Domain Address
conn_type=HTTPConnection, # HTTP Connection Protocol
client_type=DeviceClient,
) # Device Client type
node.in_memory_client_registry[node.private_device.device_id] = node.private_device
except:
pass
|
<commit_before>from syft.core.node.domain.domain import Domain
node = Domain(name="om-domain")
<commit_msg>ADD a new worker connection<commit_after>from syft.grid.grid_client import connect
from syft.core.node.domain.domain import Domain
from syft.core.node.device.client import DeviceClient
from syft.grid.connections.http_connection import HTTPConnection
from syft.grid.services.worker_management_service import CreateWorkerService
node = Domain(name="om-domain")
node.immediate_services_with_reply.append(CreateWorkerService)
node._register_services() # re-register all services including SignalingService
try:
node.private_device = connect(
url="http://localhost:5000", # Domain Address
conn_type=HTTPConnection, # HTTP Connection Protocol
client_type=DeviceClient,
) # Device Client type
node.in_memory_client_registry[node.private_device.device_id] = node.private_device
except:
pass
|
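The try/except around connect() lets the Domain node keep starting even when the private device at localhost:5000 is not reachable yet. A generic sketch of that optional-peer pattern using a named exception instead of a bare except; connect_device and DeviceUnreachable are hypothetical stand-ins for illustration only, not PySyft APIs:

in_memory_client_registry = {}

class DeviceUnreachable(Exception):
    pass

def connect_device(url):
    # placeholder: pretend the device at url is offline
    raise DeviceUnreachable(url)

try:
    device = connect_device("http://localhost:5000")
    in_memory_client_registry[id(device)] = device
except DeviceUnreachable:
    pass  # keep booting; the device can register later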
bcd7df79484fa7ca5b6a2d09b6496522dcfcd608
|
laikia_core/runme.py
|
laikia_core/runme.py
|
from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_solar_flare = event("Solar Flare", "Solar Flare hit, damaging com system", -10, 0, 0)
event_oxygen_fire = event("Fire in oxygen garden", "Oh god, fire in the oxygen garden.", 0, -3.0, -20.9)
gun = item("gun", "When used, a gun prevents the owner from being voted into the reactor")
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()
|
from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_list = []
item_list = []
#TODO load these from a file
event_list.append(event("Solar Flare", \
"Solar Flare hit, damaging com system", \
-10, \
0, \
0))
event_list.append(event("Fire in oxygen garden", \
"Oh god, fire in the oxygen garden.", \
0, \
-3.0, \
-20.9))
item_list.append(item("gun", "When used, a gun prevents the owner from being voted into the reactor"))
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()
|
Build event and item lists
|
Build event and item lists
|
Python
|
mit
|
jtdressel/laika
|
from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_solar_flare = event("Solar Flare", "Solar Flare hit, damaging com system", -10, 0, 0)
event_oxygen_fire = event("Fire in oxygen garden", "Oh god, fire in the oxygen garden.", 0, -3.0, -20.9)
gun = item("gun", "When used, a gun prevents the owner from being voted into the reactor")
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()Build event and item lists
|
from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_list = []
item_list = []
#TODO load these from a file
event_list.append(event("Solar Flare", \
"Solar Flare hit, damaging com system", \
-10, \
0, \
0))
event_list.append(event("Fire in oxygen garden", \
"Oh god, fire in the oxygen garden.", \
0, \
-3.0, \
-20.9))
item_list.append(item("gun", "When used, a gun prevents the owner from being voted into the reactor"))
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()
|
<commit_before>from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_solar_flare = event("Solar Flare", "Solar Flare hit, damaging com system", -10, 0, 0)
event_oxygen_fire = event("Fire in oxygen garden", "Oh god, fire in the oxygen garden.", 0, -3.0, -20.9)
gun = item("gun", "When used, a gun prevents the owner from being voted into the reactor")
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()<commit_msg>Build event and item lists<commit_after>
|
from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_list = []
item_list = []
#TODO load these from a file
event_list.append(event("Solar Flare", \
"Solar Flare hit, damaging com system", \
-10, \
0, \
0))
event_list.append(event("Fire in oxygen garden", \
"Oh god, fire in the oxygen garden.", \
0, \
-3.0, \
-20.9))
item_list.append(item("gun", "When used, a gun prevents the owner from being voted into the reactor"))
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()
|
from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_solar_flare = event("Solar Flare", "Solar Flare hit, damaging com system", -10, 0, 0)
event_oxygen_fire = event("Fire in oxygen garden", "Oh god, fire in the oxygen garden.", 0, -3.0, -20.9)
gun = item("gun", "When used, a gun prevents the owner from being voted into the reactor")
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()Build event and item listsfrom ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_list = []
item_list = []
#TODO load these from a file
event_list.append(event("Solar Flare", \
"Solar Flare hit, damaging com system", \
-10, \
0, \
0))
event_list.append(event("Fire in oxygen garden", \
"Oh god, fire in the oxygen garden.", \
0, \
-3.0, \
-20.9))
item_list.append(item("gun", "When used, a gun prevents the owner from being voted into the reactor"))
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()
|
<commit_before>from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_solar_flare = event("Solar Flare", "Solar Flare hit, damaging com system", -10, 0, 0)
event_oxygen_fire = event("Fire in oxygen garden", "Oh god, fire in the oxygen garden.", 0, -3.0, -20.9)
gun = item("gun", "When used, a gun prevents the owner from being voted into the reactor")
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()<commit_msg>Build event and item lists<commit_after>from ship import ship
from event import event
from character import character
from item import item
def main():
#Build ship
laika = ship("Laika", 100.0, 100.0, 100.0)
character_list = []
event_list = []
item_list = []
#TODO load these from a file
event_list.append(event("Solar Flare", \
"Solar Flare hit, damaging com system", \
-10, \
0, \
0))
event_list.append(event("Fire in oxygen garden", \
"Oh god, fire in the oxygen garden.", \
0, \
-3.0, \
-20.9))
item_list.append(item("gun", "When used, a gun prevents the owner from being voted into the reactor"))
# Loop to create characters
num_players_str = raw_input("How many players: ")
num_players = int(num_players_str)
for _ in xrange(num_players):
#TODO add choice of character abilities
name = raw_input("Name: ")
character_a = character(name, 100.0, 2.0, 1.0, .1) #100 health 2 water, 1 oxygen, .1 com
character_list.append(character_a)
# Game Loop
if __name__ == '__main__':
main()
|
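The "#TODO load these from a file" above could be served by a small JSON file; a hedged sketch, where the file name, field names, and event_cls are invented for illustration (the meaning of the three numeric arguments is not specified by the record):

import json

# events.json might look like:
# [{"name": "Solar Flare", "text": "Solar Flare hit, damaging com system",
#   "effects": [-10, 0, 0]}]
def load_events(path, event_cls):
    with open(path) as fh:
        # "effects" carries the three numeric constructor arguments in order
        return [event_cls(e["name"], e["text"], *e["effects"]) for e in json.load(fh)]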
2fc65f543ba1b2d0bd52152ddaf78feb5e7594c4
|
test/test_product.py
|
test/test_product.py
|
from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
|
from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
def test_multiply_two_linear_expressions():
assert str((x + 1) * (x + 2)) == "x^2 + 2x + x + 2"
|
Add test for multiplying linear terms
|
Add test for multiplying linear terms
|
Python
|
mit
|
LordDarkula/polypy
|
from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
Add test for multiplying linear terms
|
from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
def test_multiply_two_linear_expressions():
assert str((x + 1) * (x + 2)) == "x^2 + 2x + x + 2"
|
<commit_before>from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
<commit_msg>Add test for multiplying linear terms<commit_after>
|
from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
def test_multiply_two_linear_expressions():
assert str((x + 1) * (x + 2)) == "x^2 + 2x + x + 2"
|
from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
Add test for multiplying linear termsfrom polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
def test_multiply_two_linear_expressions():
assert str((x + 1) * (x + 2)) == "x^2 + 2x + x + 2"
|
<commit_before>from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
<commit_msg>Add test for multiplying linear terms<commit_after>from polypy.base import x
def test_call():
f = 2 * x
assert f(2) == 4
f = 3 * x ** 2
assert f(3) == 27
def test_str():
f = 2 * x
assert str(f) == "2x"
f = x * 2
assert str(f) == "2x"
def test_square():
f = x
assert f * x == x ** 2
f = 3 * x
assert f ** 2 == 9 * x ** 2
def test_multiply_x_and_linear_term():
f = 2 * x
assert f * x == (2 * x ** 2)
def test_multiply_two_linear_terms():
assert (3 * x) * (2 * x) == 6 * x ** 2
def test_multiply_two_linear_expressions():
assert str((x + 1) * (x + 2)) == "x^2 + 2x + x + 2"
|
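The added test pins the current string form of the product, which is the distributed but uncollected expansion; once like terms are combined, (x + 1)(x + 2) = x^2 + 3x + 2. That collected form can be cross-checked with sympy, used here purely for illustration (it is not a polypy dependency):

from sympy import symbols, expand

x = symbols('x')
assert expand((x + 1) * (x + 2)) == x**2 + 3*x + 2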
974cd48f1954f78be4b1766445ed9b91d391cd64
|
slackclient/_util.py
|
slackclient/_util.py
|
class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items != []:
return items
|
class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items:
return items
else:
return None
|
Simplify expression, add explicit return value
|
Simplify expression, add explicit return value
|
Python
|
mit
|
slackhq/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient
|
class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items != []:
return items
Simplify expression, add explicit return value
|
class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items:
return items
else:
return None
|
<commit_before>class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items != []:
return items
<commit_msg>Simplify expression, add explicit return value<commit_after>
|
class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items:
return items
else:
return None
|
class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items != []:
return items
Simplify expression, add explicit return valueclass SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items:
return items
else:
return None
|
<commit_before>class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items != []:
return items
<commit_msg>Simplify expression, add explicit return value<commit_after>class SearchList(list):
def find(self, name):
items = []
for child in self:
if child.__class__ == self.__class__:
items += child.find(name)
else:
if child == name:
items.append(child)
if len(items) == 1:
return items[0]
elif items:
return items
else:
return None
|
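The explicit return None makes the three possible outcomes of find unambiguous: a single unwrapped match, a list of matches, or None. A small usage sketch against a flat list, with the class reproduced from the commit above; the channel names are made up:

class SearchList(list):
    def find(self, name):
        items = []
        for child in self:
            if child.__class__ == self.__class__:
                items += child.find(name)
            else:
                if child == name:
                    items.append(child)
        if len(items) == 1:
            return items[0]
        elif items:
            return items
        else:
            return None

channels = SearchList(["general", "random", "general"])
print(channels.find("general"))  # ['general', 'general']
print(channels.find("random"))   # 'random' (a single match comes back unwrapped)
print(channels.find("missing"))  # None (explicit, instead of falling off the end)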
19c1e83d3de979495fe12d3034fe4853a181039c
|
spacy/it/__init__.py
|
spacy/it/__init__.py
|
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
class Italian(Language):
pass
|
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..attrs import LANG
from . import language_data
class German(Language):
lang = 'it'
class Defaults(Language.Defaults):
tokenizer_exceptions = dict(language_data.TOKENIZER_EXCEPTIONS)
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'it'
prefixes = tuple(language_data.TOKENIZER_PREFIXES)
suffixes = tuple(language_data.TOKENIZER_SUFFIXES)
infixes = tuple(language_data.TOKENIZER_INFIXES)
tag_map = dict(language_data.TAG_MAP)
stop_words = set(language_data.STOP_WORDS)
|
Work on draft Italian tokenizer
|
Work on draft Italian tokenizer
|
Python
|
mit
|
explosion/spaCy,banglakit/spaCy,aikramer2/spaCy,raphael0202/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,recognai/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,recognai/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,banglakit/spaCy,recognai/spaCy,spacy-io/spaCy,raphael0202/spaCy,banglakit/spaCy,aikramer2/spaCy,raphael0202/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,recognai/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,recognai/spaCy,banglakit/spaCy,raphael0202/spaCy,raphael0202/spaCy,banglakit/spaCy,honnibal/spaCy
|
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
class Italian(Language):
pass
Work on draft Italian tokenizer
|
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..attrs import LANG
from . import language_data
class German(Language):
lang = 'it'
class Defaults(Language.Defaults):
tokenizer_exceptions = dict(language_data.TOKENIZER_EXCEPTIONS)
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'it'
prefixes = tuple(language_data.TOKENIZER_PREFIXES)
suffixes = tuple(language_data.TOKENIZER_SUFFIXES)
infixes = tuple(language_data.TOKENIZER_INFIXES)
tag_map = dict(language_data.TAG_MAP)
stop_words = set(language_data.STOP_WORDS)
|
<commit_before>from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
class Italian(Language):
pass
<commit_msg>Work on draft Italian tokenizer<commit_after>
|
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..attrs import LANG
from . import language_data
class German(Language):
lang = 'it'
class Defaults(Language.Defaults):
tokenizer_exceptions = dict(language_data.TOKENIZER_EXCEPTIONS)
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'it'
prefixes = tuple(language_data.TOKENIZER_PREFIXES)
suffixes = tuple(language_data.TOKENIZER_SUFFIXES)
infixes = tuple(language_data.TOKENIZER_INFIXES)
tag_map = dict(language_data.TAG_MAP)
stop_words = set(language_data.STOP_WORDS)
|
from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
class Italian(Language):
pass
Work on draft Italian tokenizerfrom __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..attrs import LANG
from . import language_data
class German(Language):
lang = 'it'
class Defaults(Language.Defaults):
tokenizer_exceptions = dict(language_data.TOKENIZER_EXCEPTIONS)
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'it'
prefixes = tuple(language_data.TOKENIZER_PREFIXES)
suffixes = tuple(language_data.TOKENIZER_SUFFIXES)
infixes = tuple(language_data.TOKENIZER_INFIXES)
tag_map = dict(language_data.TAG_MAP)
stop_words = set(language_data.STOP_WORDS)
|
<commit_before>from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
class Italian(Language):
pass
<commit_msg>Work on draft Italian tokenizer<commit_after>from __future__ import unicode_literals, print_function
from os import path
from ..language import Language
from ..attrs import LANG
from . import language_data
class German(Language):
lang = 'it'
class Defaults(Language.Defaults):
tokenizer_exceptions = dict(language_data.TOKENIZER_EXCEPTIONS)
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'it'
prefixes = tuple(language_data.TOKENIZER_PREFIXES)
suffixes = tuple(language_data.TOKENIZER_SUFFIXES)
infixes = tuple(language_data.TOKENIZER_INFIXES)
tag_map = dict(language_data.TAG_MAP)
stop_words = set(language_data.STOP_WORDS)
|
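This draft wires the new Italian language_data into a Language subclass with lang = 'it' (the class identifier itself is still named German in this draft). In spaCy 2.0 and later, the usual way to get a blank tokenizer-only Italian pipeline is spacy.blank; a brief sketch, with a made-up sample sentence:

import spacy

nlp = spacy.blank("it")  # tokenizer-only pipeline built from the Italian base data
doc = nlp("Questa è una frase di prova.")
print([token.text for token in doc])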
81dd1c4792bca93c9df8d7acd3042c99877bff9b
|
spyonweb/spyonweb.py
|
spyonweb/spyonweb.py
|
import os
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
a = "UA-34505845"
print s.analytics(a)
if __name__ == "__main__":
main()
|
import os
from argparse import ArgumentParser
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
parser = ArgumentParser()
parser.add_argument('-s', '--summary', type=str, help="Specify a domain for the Request Summary API")
parser.add_argument('-d', '--domain', type=str, help="Specify a domain for the Domain API")
parser.add_argument('-a', '--analytics', type=str, help="Specify a code for the Analytics API")
args, _ = parser.parse_known_args()
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
if args.summary:
print s.summary(args.summary)
if args.domain:
print s.domain(args.domain)
if args.analytics:
print s.analytics(args.analytics)
if __name__ == "__main__":
main()
|
Implement basic command line parsing
|
Implement basic command line parsing
|
Python
|
apache-2.0
|
krmaxwell/spyonweb
|
import os
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
a = "UA-34505845"
print s.analytics(a)
if __name__ == "__main__":
main()
Implement basic command line parsing
|
import os
from argparse import ArgumentParser
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
parser = ArgumentParser()
parser.add_argument('-s', '--summary', type=str, help="Specify a domain for the Request Summary API")
parser.add_argument('-d', '--domain', type=str, help="Specify a domain for the Domain API")
parser.add_argument('-a', '--analytics', type=str, help="Specify a code for the Analytics API")
args, _ = parser.parse_known_args()
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
if args.summary:
print s.summary(args.summary)
if args.domain:
print s.domain(args.domain)
if args.analytics:
print s.analytics(args.analytics)
if __name__ == "__main__":
main()
|
<commit_before>import os
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
a = "UA-34505845"
print s.analytics(a)
if __name__ == "__main__":
main()
<commit_msg>Implement basic command line parsing<commit_after>
|
import os
from argparse import ArgumentParser
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
parser = ArgumentParser()
parser.add_argument('-s', '--summary', type=str, help="Specify a domain for the Request Summary API")
parser.add_argument('-d', '--domain', type=str, help="Specify a domain for the Domain API")
parser.add_argument('-a', '--analytics', type=str, help="Specify a code for the Analytics API")
args, _ = parser.parse_known_args()
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
if args.summary:
print s.summary(args.summary)
if args.domain:
print s.domain(args.domain)
if args.analytics:
print s.analytics(args.analytics)
if __name__ == "__main__":
main()
|
import os
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
a = "UA-34505845"
print s.analytics(a)
if __name__ == "__main__":
main()
Implement basic command line parsing
import os
from argparse import ArgumentParser
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
parser = ArgumentParser()
parser.add_argument('-s', '--summary', type=str, help="Specify a domain for the Request Summary API")
parser.add_argument('-d', '--domain', type=str, help="Specify a domain for the Domain API")
parser.add_argument('-a', '--analytics', type=str, help="Specify a code for the Analytics API")
args, _ = parser.parse_known_args()
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
if args.summary:
print s.summary(args.summary)
if args.domain:
print s.domain(args.domain)
if args.analytics:
print s.analytics(args.analytics)
if __name__ == "__main__":
main()
|
<commit_before>import os
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
a = "UA-34505845"
print s.analytics(a)
if __name__ == "__main__":
main()
<commit_msg>Implement basic command line parsing<commit_after>import os
from argparse import ArgumentParser
import requests
class spyonweb(object):
def __init__(self, token, url=None):
self.token = token
if url:
self.url = url
else:
self.url = "https://api.spyonweb.com/v1/"
def summary(self, domain_name):
data = requests.get(self.url + "summary/" + domain_name + "?access_token=" + self.token)
return data.json()
def domain(self, domain_name):
data = requests.get(self.url + "domain/" + domain_name + "?access_token=" + self.token)
return data.json()
def analytics(self, code, limit=None):
data = requests.get(self.url + "analytics/" + code + "?access_token=" + self.token + "&limit=" + limit)
# TODO: implement paging
return data.json()
def main():
parser = ArgumentParser()
parser.add_argument('-s', '--summary', type=str, help="Specify a domain for the Request Summary API")
parser.add_argument('-d', '--domain', type=str, help="Specify a domain for the Domain API")
parser.add_argument('-a', '--analytics', type=str, help="Specify a code for the Analytics API")
args, _ = parser.parse_known_args()
token = os.getenv("SPYONWEB_API")
s = spyonweb(token=token)
if args.summary:
print s.summary(args.summary)
if args.domain:
print s.domain(args.domain)
if args.analytics:
print s.analytics(args.analytics)
if __name__ == "__main__":
main()
|
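For readers unfamiliar with the argparse pattern introduced in the commit above, here is a minimal standalone sketch of optional string flags parsed with parse_known_args, which collects unrecognised arguments instead of erroring; the flag names are illustrative only.

# Minimal argparse sketch: optional string flags, unknown args ignored.
from argparse import ArgumentParser

parser = ArgumentParser(description="Demo of optional flags")
parser.add_argument('-s', '--summary', type=str, help="Domain to summarise")
parser.add_argument('-a', '--analytics', type=str, help="Analytics code to look up")
args, unknown = parser.parse_known_args(['-s', 'example.com', '--unexpected'])

print(args.summary)    # 'example.com'
print(args.analytics)  # None (flag not supplied)
print(unknown)         # ['--unexpected'] -- left over, not an error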
72f4dc35375ba001c2b1dbaca4d0914dc2c4de9d
|
tests/test_compat.py
|
tests/test_compat.py
|
import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
|
import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_empty(self):
data = ''
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
|
Add test empty json post data
|
Add test empty json post data
|
Python
|
mit
|
shanemgrey/django-rest-framework-jwt,GetBlimp/django-rest-framework-jwt,ArabellaTech/django-rest-framework-jwt,orf/django-rest-framework-jwt,blaklites/django-rest-framework-jwt,plentific/django-rest-framework-jwt
|
import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
Add test empty json post data
|
import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_empty(self):
data = ''
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
|
<commit_before>import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
<commit_msg>Add test empty json post data<commit_after>
|
import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_empty(self):
data = ''
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
|
import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
Add test empty json post data
import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_empty(self):
data = ''
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
|
<commit_before>import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
<commit_msg>Add test empty json post data<commit_after>import pytest
from django.test import TestCase
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from rest_framework.parsers import JSONParser
from rest_framework.exceptions import ParseError
from rest_framework_jwt.compat import get_request_data
class CompatTests(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
def test_get_request_data(self):
data = '{"a":"b"}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {'a': 'b'}
def test_get_request_data_empty(self):
data = ''
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
assert get_request_data(request) == {}
def test_get_request_data_invalid(self):
data = '{a:b}'
post = self.factory.post('/', data, content_type='application/json')
request = Request(post, parsers=[JSONParser()])
with pytest.raises(ParseError):
get_request_data(request)
|
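The tests above exercise a get_request_data helper whose implementation is not part of this record. A plausible shape for such a compat shim -- purely an assumption, not the actual rest_framework_jwt code -- is to prefer request.data (modern DRF) and fall back to the older request.DATA attribute:

# Hypothetical compat helper: NOT the real rest_framework_jwt implementation.
def get_request_data(request):
    # DRF 3.x exposes the parsed payload as `request.data`;
    # older DRF versions used `request.DATA` instead.
    if hasattr(request, 'data'):
        return request.data
    return request.DATA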
a2084c42850a29c417f2a9caf9dc1c56a7945e6b
|
backend/scripts/create_training_demos.py
|
backend/scripts/create_training_demos.py
|
#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -XPUT http://mctest.localhost/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
|
#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -k -XPUT https://test.materialscommons.org/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
|
Change url to test and add -k flag to ignore certificate
|
Change url to test and add -k flag to ignore certificate
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -XPUT http://mctest.localhost/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
Change url to test and add -k flag to ignore certificate
|
#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -k -XPUT https://test.materialscommons.org/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
|
<commit_before>#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -XPUT http://mctest.localhost/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
<commit_msg>Change url to test and add -k flag to ignore certificate<commit_after>
|
#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -k -XPUT https://test.materialscommons.org/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
|
#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -XPUT http://mctest.localhost/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
Change url to test and add -k flag to ignore certificate
#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -k -XPUT https://test.materialscommons.org/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
|
<commit_before>#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -XPUT http://mctest.localhost/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
<commit_msg>Change url to test and add -k flag to ignore certificate<commit_after>#!/usr/bin/env python
import rethinkdb as r
import os
if __name__ == "__main__":
conn = r.connect('localhost', 30815, db='materialscommons')
apikeys = r.table('users').pluck('apikey', 'id').run(conn)
for k in apikeys:
command = "curl -k -XPUT https://test.materialscommons.org/api/v2/users/%s/create_demo_project?apikey=%s" % (
k['id'], k['apikey'])
print command
os.system(command)
|
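Shelling out to curl via os.system works, but the same request can be made in-process with the requests library, which avoids interpolating values into a shell command string. A rough equivalent of the updated call is sketched below, assuming the URL and key names from the script above; verify=False plays the role of curl's -k.

# Sketch: in-process equivalent of the `curl -k -XPUT ...` call above.
import requests

def create_demo_project(user_id, apikey):
    url = "https://test.materialscommons.org/api/v2/users/%s/create_demo_project" % user_id
    # verify=False mirrors curl's -k (skip certificate verification).
    resp = requests.put(url, params={"apikey": apikey}, verify=False)
    return resp.status_code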
0ae513c9ea37e04deb3c72d0c61ca480a8c62266
|
lpthw/ex24.py
|
lpthw/ex24.py
|
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
# Notice how 'beans' here was called 'jelly_beans' in the function
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
Comment for a slick little trick.
|
Comment for a slick little trick.
|
Python
|
mit
|
jaredmanning/learning,jaredmanning/learning
|
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
Comment for a slick little trick.
|
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
# Notice how 'beans' here was called 'jelly_beans' in the function
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
<commit_before>print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
<commit_msg>Comment for a slick little trick.<commit_after>
|
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
# Notice how 'beans' here was called 'jelly_beans' in the function
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
Comment for a slick little trick.
print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
# Notice how 'beans' here was called 'jelly_beans' in the function
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
<commit_before>print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
<commit_msg>Comment for a slick little trick.<commit_after>print "Let's practice everything."
print "You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs."
poem = """
\t the lovely world
wtih logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
"""
This is not the Krabby
Patty Secret Formula (tm)
"""
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
start_point = 10000
# Notice how 'beans' here was called 'jelly_beans' in the function
beans, jars, crates = secret_formula(start_point)
print "With a starting point of %d" % start_point
print "We'd have %d beans %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# This part is pretty darn cool. \/
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
|
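The comment added in this commit makes a small but useful point: the names used when unpacking a function's return value are independent of the names inside the function. A tiny self-contained illustration:

# Return values are positional; the caller picks its own names.
def make_totals(count):
    jelly_beans = count * 500
    jars = jelly_beans / 1000
    return jelly_beans, jars

beans, containers = make_totals(10)   # 'beans' was 'jelly_beans' inside the function
print(beans)        # 5000
print(containers)   # 5 (Python 2 integer division, matching the exercise's style)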
fcc4546a736fd6adacf6f7fe0261a1c6304c931c
|
src/ipf/ipfblock/connection.py
|
src/ipf/ipfblock/connection.py
|
# -*- coding: utf-8 -*-
import ioport
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = oport
self._iport = iport
self._oport.increase_binded_count()
self._iport.set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport.decrease_binded_count()
self._iport.set_free()
def contains_port(self, port):
return self._iport == port or self._oport == port
def process(self):
""" Send value from output port to input port """
self._iport.pass_value(self._oport.get_value())
|
# -*- coding: utf-8 -*-
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport().decrease_binded_count()
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
Change _oport and _iport to weakref for prevention of loop references
|
Change _oport and _iport to weakref for prevention of loop references
|
Python
|
lgpl-2.1
|
anton-golubkov/Garland,anton-golubkov/Garland
|
# -*- coding: utf-8 -*-
import ioport
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = oport
self._iport = iport
self._oport.increase_binded_count()
self._iport.set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport.decrease_binded_count()
self._iport.set_free()
def contains_port(self, port):
return self._iport == port or self._oport == port
def process(self):
""" Send value from output port to input port """
self._iport.pass_value(self._oport.get_value())
Change _oport and _iport to weakref for prevention of loop references
|
# -*- coding: utf-8 -*-
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport().decrease_binded_count()
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
<commit_before># -*- coding: utf-8 -*-
import ioport
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = oport
self._iport = iport
self._oport.increase_binded_count()
self._iport.set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport.decrease_binded_count()
self._iport.set_free()
def contains_port(self, port):
return self._iport == port or self._oport == port
def process(self):
""" Send value from output port to input port """
self._iport.pass_value(self._oport.get_value())
<commit_msg>Change _oport and _iport to weakref for prevention of loop references<commit_after>
|
# -*- coding: utf-8 -*-
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport().decrease_binded_count()
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
# -*- coding: utf-8 -*-
import ioport
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = oport
self._iport = iport
self._oport.increase_binded_count()
self._iport.set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport.decrease_binded_count()
self._iport.set_free()
def contains_port(self, port):
return self._iport == port or self._oport == port
def process(self):
""" Send value from output port to input port """
self._iport.pass_value(self._oport.get_value())
Change _oport and _iport to weakref for prevention of loop references
# -*- coding: utf-8 -*-
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport().decrease_binded_count()
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
<commit_before># -*- coding: utf-8 -*-
import ioport
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = oport
self._iport = iport
self._oport.increase_binded_count()
self._iport.set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport.decrease_binded_count()
self._iport.set_free()
def contains_port(self, port):
return self._iport == port or self._oport == port
def process(self):
""" Send value from output port to input port """
self._iport.pass_value(self._oport.get_value())
<commit_msg>Change _oport and _iport to weakref for prevention of loop references<commit_after># -*- coding: utf-8 -*-
import ioport
import weakref
class Connection(object):
""" Connection class for IPFBlock
Connection binding OPort and IPort of some IPFBlocks
"""
def __init__(self, oport, iport):
# Check port compatibility and free of input port
if ioport.is_connect_allowed(oport, iport):
self._oport = weakref.ref(oport)
self._iport = weakref.ref(iport)
self._oport().increase_binded_count()
self._iport().set_binded()
else:
raise ValueError("Can not create Connection with given ports")
def __del__(self):
self._oport().decrease_binded_count()
self._iport().set_free()
def contains_port(self, port):
return self._iport() == port or self._oport() == port
def process(self):
""" Send value from output port to input port """
self._iport().pass_value(self._oport().get_value())
|
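The change above swaps strong references for weakref.ref so a Connection no longer keeps its ports (and, through them, their blocks) alive in a reference cycle. A minimal demonstration of the weakref behaviour the new code relies on: calling the ref returns the object while it is alive, and None once it has been collected (shown here under CPython's reference counting).

import weakref

class Port(object):
    pass

port = Port()
ref = weakref.ref(port)
print(ref() is port)   # True: target still alive, the call returns it

del port               # drop the last strong reference
print(ref() is None)   # True on CPython: target collected, ref now returns None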
a4a5ca393ffc553202b266bdea790768a119f8f8
|
django_pin_auth/auth_backend.py
|
django_pin_auth/auth_backend.py
|
from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
user_model = get_user_model()
kwargs = {
user_model.USERNAME_FIELD: email
}
try:
user = user_model.objects.get(**kwargs)
except user_model.DoesNotExist:
return None
# Now that we have the user, check that we have a token
try:
token = self._get_token(user, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return user
def _get_token(self, user, pin):
"""Get the token for corresponding user and pin."""
return SingleUseToken.objects.get(user=user, token=pin)
|
from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
try:
token = self._get_token(email, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return token.user
def _get_token(self, email, pin):
"""Get the token for corresponding user and pin."""
user_model = get_user_model()
kwargs = {
'user__%s' % user_model.USERNAME_FIELD: email,
'token': pin
}
return SingleUseToken.objects.get(**kwargs)
def get_user(self, user_id):
"""Get user from id."""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
Refactor to query straight for user
|
refactor(verification): Refactor to query straight for user
- Implement get_user
- Change to make a single query to find the token, using a join to the
user's email
|
Python
|
mit
|
redapesolutions/django-pin-auth,redapesolutions/django-pin-auth,redapesolutions/django-pin-auth
|
from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
user_model = get_user_model()
kwargs = {
user_model.USERNAME_FIELD: email
}
try:
user = user_model.objects.get(**kwargs)
except user_model.DoesNotExist:
return None
# Now that we have the user, check that we have a token
try:
token = self._get_token(user, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return user
def _get_token(self, user, pin):
"""Get the token for corresponding user and pin."""
return SingleUseToken.objects.get(user=user, token=pin)
refactor(verification): Refactor to query straight for user
- Implement get_user
- Change to make a single query to find the token, using a join to the
user's email
|
from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
try:
token = self._get_token(email, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return token.user
def _get_token(self, email, pin):
"""Get the token for corresponding user and pin."""
user_model = get_user_model()
kwargs = {
'user__%s' % user_model.USERNAME_FIELD: email,
'token': pin
}
return SingleUseToken.objects.get(**kwargs)
def get_user(self, user_id):
"""Get user from id."""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
<commit_before>from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
user_model = get_user_model()
kwargs = {
user_model.USERNAME_FIELD: email
}
try:
user = user_model.objects.get(**kwargs)
except user_model.DoesNotExist:
return None
# Now that we have the user, check that we have a token
try:
token = self._get_token(user, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return user
def _get_token(self, user, pin):
"""Get the token for corresponding user and pin."""
return SingleUseToken.objects.get(user=user, token=pin)
<commit_msg>refactor(verification): Refactor to query straight for user
- Implement get_user
- Change to make a single query to find the token, using a join to the
user's email<commit_after>
|
from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
try:
token = self._get_token(email, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return token.user
def _get_token(self, email, pin):
"""Get the token for corresponding user and pin."""
user_model = get_user_model()
kwargs = {
'user__%s' % user_model.USERNAME_FIELD: email,
'token': pin
}
return SingleUseToken.objects.get(**kwargs)
def get_user(self, user_id):
"""Get user from id."""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
user_model = get_user_model()
kwargs = {
user_model.USERNAME_FIELD: email
}
try:
user = user_model.objects.get(**kwargs)
except user_model.DoesNotExist:
return None
# Now that we have the user, check that we have a token
try:
token = self._get_token(user, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return user
def _get_token(self, user, pin):
"""Get the token for corresponding user and pin."""
return SingleUseToken.objects.get(user=user, token=pin)
refactor(verification): Refactor to query straight for user
- Implement get_user
- Change to make a single query to find the token, using a join to the
user's email
from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
try:
token = self._get_token(email, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return token.user
def _get_token(self, email, pin):
"""Get the token for corresponding user and pin."""
user_model = get_user_model()
kwargs = {
'user__%s' % user_model.USERNAME_FIELD: email,
'token': pin
}
return SingleUseToken.objects.get(**kwargs)
def get_user(self, user_id):
"""Get user from id."""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
<commit_before>from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
user_model = get_user_model()
kwargs = {
user_model.USERNAME_FIELD: email
}
try:
user = user_model.objects.get(**kwargs)
except user_model.DoesNotExist:
return None
# Now that we have the user, check that we have a token
try:
token = self._get_token(user, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return user
def _get_token(self, user, pin):
"""Get the token for corresponding user and pin."""
return SingleUseToken.objects.get(user=user, token=pin)
<commit_msg>refactor(verification): Refactor to query straight for user
- Implement get_user
- Change to make a single query to find the token, using a join to the
user's email<commit_after>from django.contrib.auth import get_user_model
from .models import SingleUseToken
class PinBackend(object):
"""Authentication backend based on pin value."""
def authenticate(self, request, email=None, pin=None):
"""Authenticate user based on valid pin."""
try:
token = self._get_token(email, pin)
except SingleUseToken.DoesNotExist:
return None
if token.is_valid():
# Read token (delete it)
token.read()
return token.user
def _get_token(self, email, pin):
"""Get the token for corresponding user and pin."""
user_model = get_user_model()
kwargs = {
'user__%s' % user_model.USERNAME_FIELD: email,
'token': pin
}
return SingleUseToken.objects.get(**kwargs)
def get_user(self, user_id):
"""Get user from id."""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
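The refactor above collapses two queries into one by filtering the token through a related-field lookup whose key is built at runtime ('user__<USERNAME_FIELD>'). The underlying trick -- composing the double-underscore lookup as a string and expanding a kwargs dict -- is plain Python plus the Django ORM. A schematic sketch, with 'Token' and the field name standing in for real models:

# Schematic only: 'Token' and 'username_field' are placeholders.
def find_token(Token, username_field, email, pin):
    lookups = {
        'user__%s' % username_field: email,  # JOINs through to the user table
        'token': pin,
    }
    # Equivalent to Token.objects.get(user__email=..., token=...) when
    # username_field == 'email'; raises Token.DoesNotExist if no match.
    return Token.objects.get(**lookups)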
75ee8c74af18c2ac9b3f4975d79a5d799ccc46da
|
pylatex/graphics.py
|
pylatex/graphics.py
|
# -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
self.append(Command('includegraphics', options='width=' + width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
|
# -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
if width is not None:
width = 'width=' + str(width)
self.append(Command('includegraphics', options=width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
|
Make figure a bit better
|
Make figure a bit better
|
Python
|
mit
|
jendas1/PyLaTeX,bjodah/PyLaTeX,bjodah/PyLaTeX,ovaskevich/PyLaTeX,votti/PyLaTeX,JelteF/PyLaTeX,votti/PyLaTeX,jendas1/PyLaTeX,sebastianhaas/PyLaTeX,JelteF/PyLaTeX,ovaskevich/PyLaTeX,sebastianhaas/PyLaTeX
|
# -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
self.append(Command('includegraphics', options='width=' + width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
Make figure a bit better
|
# -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
if width is not None:
width = 'width=' + str(width)
self.append(Command('includegraphics', options=width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
|
<commit_before># -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
self.append(Command('includegraphics', options='width=' + width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
<commit_msg>Make figure a bit better<commit_after>
|
# -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
if width is not None:
width = 'width=' + str(width)
self.append(Command('includegraphics', options=width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
|
# -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
self.append(Command('includegraphics', options='width=' + width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
Make figure a bit better# -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
if width is not None:
width = 'width=' + str(width)
self.append(Command('includegraphics', options=width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
|
<commit_before># -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
self.append(Command('includegraphics', options='width=' + width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
<commit_msg>Make figure a bit better<commit_after># -*- coding: utf-8 -*-
"""
pylatex.graphics
~~~~~~~~~~~~~~~~
This module implements the class that deals with graphics.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
from .utils import fix_filename
from .base_classes import BaseLaTeXNamedContainer
from .package import Package
from .command import Command
class Figure(BaseLaTeXNamedContainer):
"""A class that represents a Graphic."""
def __init__(self, data=None, position=None):
packages = [Package('graphicx')]
super().__init__('figure', data=data, packages=packages,
options=position)
def add_image(self, filename, width=r'0.8\textwidth',
placement=r'\centering'):
if placement is not None:
self.append(placement)
if width is not None:
width = 'width=' + str(width)
self.append(Command('includegraphics', options=width,
arguments=fix_filename(filename)))
def add_caption(self, caption):
"""Add a caption to the figure"""
self.append(Command('caption', caption))
|
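A minimal usage sketch of the revised add_image (not part of the record above): with the new guard, passing width=None omits the width option from \includegraphics entirely, while the default still emits width=0.8\textwidth. The filename plot.png and the htbp position are invented for illustration.
from pylatex.graphics import Figure
fig = Figure(position='htbp')
fig.add_image('plot.png')              # -> \includegraphics[width=0.8\textwidth]{plot.png}
fig.add_image('plot.png', width=None)  # -> \includegraphics{plot.png}, no width option
fig.add_caption('Example figure')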
4dea81f345de76ec4b1c9e1976ccb56639757ca7
|
django/contrib/comments/feeds.py
|
django/contrib/comments/feeds.py
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
Use correct m2m join table name in LatestCommentsFeed
|
Use correct m2m join table name in LatestCommentsFeed
git-svn-id: http://code.djangoproject.com/svn/django/trunk@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
--HG--
extra : convert_revision : 9ea8b1f1f4ccc068b460e76127f288742d25088e
|
Python
|
bsd-3-clause
|
adieu/django-nonrel,adieu/django-nonrel,adieu/django-nonrel,heracek/django-nonrel,heracek/django-nonrel,heracek/django-nonrel
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
Use correct m2m join table name in LatestCommentsFeed
git-svn-id: http://code.djangoproject.com/svn/django/trunk@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
--HG--
extra : convert_revision : 9ea8b1f1f4ccc068b460e76127f288742d25088e
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
<commit_before>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
<commit_msg>Use correct m2m join table name in LatestCommentsFeed
git-svn-id: http://code.djangoproject.com/svn/django/trunk@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
--HG--
extra : convert_revision : 9ea8b1f1f4ccc068b460e76127f288742d25088e<commit_after>
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
Use correct m2m join table name in LatestCommentsFeed
git-svn-id: http://code.djangoproject.com/svn/django/trunk@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
--HG--
extra : convert_revision : 9ea8b1f1f4ccc068b460e76127f288742d25088efrom django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
<commit_before>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
<commit_msg>Use correct m2m join table name in LatestCommentsFeed
git-svn-id: http://code.djangoproject.com/svn/django/trunk@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
--HG--
extra : convert_revision : 9ea8b1f1f4ccc068b460e76127f288742d25088e<commit_after>from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
|
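A hedged alternative to the fix above (not part of the commit): the banned-group filter can also be expressed through the ORM instead of a hand-written SQL fragment, which avoids hard-coding the auth_user_groups join table at all. This sketch drops into LatestCommentFeed in place of items(); it assumes the comment model's user foreign key and Django's stock Group relation, and its handling of anonymous comments may differ slightly from the raw NOT IN subquery.
    def items(self):
        qs = comments.get_model().objects.filter(
            site__pk=settings.SITE_ID,
            is_public=True,
            is_removed=False,
        )
        banned_group = getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None)
        if banned_group:
            # exclude comments whose author belongs to the banned group
            qs = qs.exclude(user__groups__id=banned_group)
        return qs.order_by('-submit_date')[:40]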
2c1b049e1e2af8feaf1dd5c2f173d8868ee29499
|
api/docs_resource.py
|
api/docs_resource.py
|
from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.protocol, request.headers["HOST"])
)
|
from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.scheme, request.headers["HOST"])
)
|
Use correct property for falcon 2.0.
|
Use correct property for falcon 2.0.
|
Python
|
mit
|
EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger
|
from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.protocol, request.headers["HOST"])
)
Use correct property for falcon 2.0.
|
from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.scheme, request.headers["HOST"])
)
|
<commit_before>from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.protocol, request.headers["HOST"])
)
<commit_msg>Use correct property for falcon 2.0.<commit_after>
|
from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.scheme, request.headers["HOST"])
)
|
from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.protocol, request.headers["HOST"])
)
Use correct property for falcon 2.0.from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.scheme, request.headers["HOST"])
)
|
<commit_before>from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.protocol, request.headers["HOST"])
)
<commit_msg>Use correct property for falcon 2.0.<commit_after>from string import Template
doc_template = Template(open("api/views/index.html", "r").read())
class DocsResource(object):
def on_get(self, request, response):
response.set_header("Strict-Transport-Security", "max-age=31536000")
response.content_type = "text/html"
response.body = doc_template.substitute(
site="%s://%s" % (request.scheme, request.headers["HOST"])
)
|
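A hedged sketch (not in the record) of wiring the resource into an app and exercising it with Falcon's test client; the app object, the /docs route, and the Host header value are assumptions, and it presumes the repository's api/views/index.html template is available. Only the request.scheme usage comes from the diff itself.
import falcon
from falcon import testing
app = falcon.API()
app.add_route('/docs', DocsResource())
client = testing.TestClient(app)
result = client.simulate_get('/docs', headers={'HOST': 'docs.example.com'})
# The template's site value should render as http://docs.example.com here.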
3ceba413c57eec2034fb02e8a5557e69cf54a415
|
litslist/commands.py
|
litslist/commands.py
|
"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
return
|
"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
break_point = count
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
try:
print sets
for category, category_list in sets.items():
f = open(os.path.join(os.curdir, 'Remaining_' + category + '.txt'), 'w')
file_content = "\n".join(category_list[break_point:])
f.write(file_content)
f.close()
except NameError, e:
print e
pass
return
|
Set up creating files for unused items
|
Set up creating files for unused items
|
Python
|
mit
|
AlexMathew/litslist
|
"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
return
Set up creating files for unused items
|
"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
break_point = count
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
try:
print sets
for category, category_list in sets.items():
f = open(os.path.join(os.curdir, 'Remaining_' + category + '.txt'), 'w')
file_content = "\n".join(category_list[break_point:])
f.write(file_content)
f.close()
except NameError, e:
print e
pass
return
|
<commit_before>"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
return
<commit_msg>Set up creating files for unused items<commit_after>
|
"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
break_point = count
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
try:
print sets
for category, category_list in sets.items():
f = open(os.path.join(os.curdir, 'Remaining_' + category + '.txt'), 'w')
file_content = "\n".join(category_list[break_point:])
f.write(file_content)
f.close()
except NameError, e:
print e
pass
return
|
"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
return
Set up creating files for unused items"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
break_point = count
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
try:
print sets
for category, category_list in sets.items():
f = open(os.path.join(os.curdir, 'Remaining_' + category + '.txt'), 'w')
file_content = "\n".join(category_list[break_point:])
f.write(file_content)
f.close()
except NameError, e:
print e
pass
return
|
<commit_before>"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
return
<commit_msg>Set up creating files for unused items<commit_after>"""
Includes execution logic for the commands
"""
import os
import csv
import random
def run_create(count):
file_list = [filename for filename in os.listdir('.') if os.path.isfile(filename)]
sets = {}
for filename in file_list:
content = open(os.path.join(os.curdir, filename)).read().split('\n')
random.shuffle(content)
sets[filename[:filename.index('.')].title()] = content
if not os.path.exists('Lists'):
os.mkdir('Lists')
break_point = count
for i in xrange(count):
try:
f = open(os.path.join(os.curdir, 'Lists', str(i+1) + '.txt'), 'w')
file_content = ''
for category, category_list in sets.items():
file_content += (category + ' -- ' + category_list[i] + '\n')
f.write(file_content)
f.close()
except IndexError:
break_point = i
break
try:
print sets
for category, category_list in sets.items():
f = open(os.path.join(os.curdir, 'Remaining_' + category + '.txt'), 'w')
file_content = "\n".join(category_list[break_point:])
f.write(file_content)
f.close()
except NameError, e:
print e
pass
return
|
4468827795606ae57c5a7d62f5b2f08d93387f39
|
virustotal/server.py
|
virustotal/server.py
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=10)
run(host='0.0.0.0')
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
Use refresh only if required in parameters (?refresh=something)
|
Use refresh only if required in parameters (?refresh=something)
|
Python
|
mit
|
enricobacis/playscraper
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=10)
run(host='0.0.0.0')
Use refresh only if required in parameters (?refresh=something)
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
<commit_before>#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=10)
run(host='0.0.0.0')
<commit_msg>Use refresh only if required in parameters (?refresh=something)<commit_after>
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=10)
run(host='0.0.0.0')
Use refresh only if required in parameters (?refresh=something)#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
<commit_before>#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=10)
run(host='0.0.0.0')
<commit_msg>Use refresh only if required in parameters (?refresh=something)<commit_after>#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT detected, count(*) FROM virus GROUP BY detected ORDER BY detected')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
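A hedged illustration (not from the commit) of why this works: bottle's request.query is a FormsDict, and missing keys read as an empty string, so the template only receives a truthy refresh value when the caller opts in with ?refresh=<seconds>. Standalone demonstration using FormsDict directly:
from bottle import FormsDict
query = FormsDict()                # no query parameters supplied
print(repr(query.refresh))         # -> '' (falsy, so the page will not auto-reload)
query = FormsDict(refresh='30')    # simulates ?refresh=30
print(repr(query.refresh))         # -> '30'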
0c04f8cac3e1cbe24a5e4ed699e7c743b962e945
|
pygotham/filters.py
|
pygotham/filters.py
|
"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p']
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
|
"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p', 'dl', 'dt', 'dd', 'cite',
]
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
|
Add additional HTML tags to reST filter
|
Add additional HTML tags to reST filter
|
Python
|
bsd-3-clause
|
djds23/pygotham-1,djds23/pygotham-1,PyGotham/pygotham,djds23/pygotham-1,djds23/pygotham-1,pathunstrom/pygotham,pathunstrom/pygotham,PyGotham/pygotham,PyGotham/pygotham,pathunstrom/pygotham,pathunstrom/pygotham,djds23/pygotham-1,PyGotham/pygotham,pathunstrom/pygotham,PyGotham/pygotham
|
"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p']
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
Add additional HTML tags to reST filter
|
"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p', 'dl', 'dt', 'dd', 'cite',
]
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
|
<commit_before>"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p']
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
<commit_msg>Add additional HTML tags to reST filter<commit_after>
|
"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p', 'dl', 'dt', 'dd', 'cite',
]
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
|
"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p']
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
Add additional HTML tags to reST filter"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p', 'dl', 'dt', 'dd', 'cite',
]
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
|
<commit_before>"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p']
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
<commit_msg>Add additional HTML tags to reST filter<commit_after>"""Template filters for use across apps."""
import bleach
from docutils import core
__all__ = 'rst_to_html'
_ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p', 'dl', 'dt', 'dd', 'cite',
]
def rst_to_html(value):
"""Return HTML generated from reStructuredText."""
parts = core.publish_parts(source=value, writer_name='html')
return bleach.clean(
parts['body_pre_docinfo'] + parts['fragment'], tags=_ALLOWED_TAGS)
|
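A hedged example (not part of the commit) of what the wider whitelist allows through: docutils renders reST definition lists and citations as dl/dt/dd and cite elements, which the previous tag list would have escaped. The sample HTML fragment is made up.
import bleach
allowed = bleach.ALLOWED_TAGS + [
    'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'p', 'dl', 'dt', 'dd', 'cite',
]
html = '<dl><dt>term</dt><dd>definition with <cite>a citation</cite></dd></dl>'
print(bleach.clean(html, tags=allowed))   # the fragment survives intact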
6a36f0cef03af2c61dbc0c5dfd4fc47496ae1f05
|
moss/plots.py
|
moss/plots.py
|
import os.path as op
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(fname, ax=None, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
ax.set_title(op.basename(fname))
return ax
|
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(mask_img, hist=False):
"""Plot the distribution of voxel coordinates in a mask image or file.
Parameters
----------
fname : string or nibabel image
path to binary mask file or image object with data and affine
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
if isinstance(mask_img, basestring):
img = nib.load(mask_img)
else:
img = mask_img
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
return ax
|
Allow plot_mask to take nibabel image
|
Allow plot_mask to take nibabel image
|
Python
|
bsd-3-clause
|
mwaskom/moss,mwaskom/moss
|
import os.path as op
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(fname, ax=None, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
ax.set_title(op.basename(fname))
return ax
Allow plot_mask to take nibabel image
|
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(mask_img, hist=False):
"""Plot the distribution of voxel coordinates in a mask image or file.
Parameters
----------
fname : string or nibabel image
path to binary mask file or image object with data and affine
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
if isinstance(mask_img, basestring):
img = nib.load(mask_img)
else:
img = mask_img
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
return ax
|
<commit_before>import os.path as op
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(fname, ax=None, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
ax.set_title(op.basename(fname))
return ax
<commit_msg>Allow plot_mask to take nibabel image<commit_after>
|
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(mask_img, hist=False):
"""Plot the distribution of voxel coordinates in a mask image or file.
Parameters
----------
fname : string or nibabel image
path to binary mask file or image object with data and affine
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
if isinstance(mask_img, basestring):
img = nib.load(mask_img)
else:
img = mask_img
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
return ax
|
import os.path as op
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(fname, ax=None, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
ax.set_title(op.basename(fname))
return ax
Allow plot_mask to take nibabel imageimport numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(mask_img, hist=False):
"""Plot the distribution of voxel coordinates in a mask image or file.
Parameters
----------
fname : string or nibabel image
path to binary mask file or image object with data and affine
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
if isinstance(mask_img, basestring):
img = nib.load(mask_img)
else:
img = mask_img
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
return ax
|
<commit_before>import os.path as op
import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(fname, ax=None, hist=False):
"""Plot the distribution of voxel coordinates in a mask file.
Parameters
----------
fname : string
path to binary mask file
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
img = nib.load(fname)
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
ax.set_title(op.basename(fname))
return ax
<commit_msg>Allow plot_mask to take nibabel image<commit_after>import numpy as np
import nibabel as nib
import matplotlib.pyplot as plt
import seaborn as sns
def plot_mask_distribution(mask_img, hist=False):
"""Plot the distribution of voxel coordinates in a mask image or file.
Parameters
----------
fname : string or nibabel image
path to binary mask file or image object with data and affine
Returns
-------
ax : matplotlib axis object
axis with plot on it
"""
if ax is None:
ax = plt.subplot(111)
if isinstance(mask_img, basestring):
img = nib.load(mask_img)
else:
img = mask_img
mask = img.get_data()
aff = img.get_affine()
vox = np.where(mask)
vox = np.vstack([vox, np.ones(mask.sum())])
coords = np.dot(aff, vox)[:-1]
colors = sns.get_color_list()[:3]
for axis, data, color in zip(["x", "y", "z"], coords, colors):
if hist:
sns.kdeplot(data, hist=True, label=axis, color=color, ax=ax)
else:
sns.kdeplot(data, shade=True, label=axis, color=color, ax=ax)
ax.legend()
return ax
|
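A hedged sketch (not from the commit) of constructing an in-memory mask image of the kind the revised function is meant to accept alongside plain filenames; the array shape, threshold, and affine are arbitrary, and the calls are left commented because they depend on importing the module above.
import numpy as np
import nibabel as nib
mask = (np.random.rand(16, 16, 16) > 0.7).astype(np.int16)
img = nib.Nifti1Image(mask, affine=np.eye(4))
# plot_mask_distribution(img)              # pass the image object directly
# plot_mask_distribution('mask.nii.gz')    # the filename path still works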
9171c9afe54c63c450ba661bcc21d83546f4de50
|
bpython/__init__.py
|
bpython/__init__.py
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.10'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
Set the version name for the default branch to mercurial so we can tell when we run from the repository
|
Set the version name for the default branch to mercurial so we can tell when we run from the repository
|
Python
|
mit
|
thomasballinger/old-bpython-with-hy-support,hirochachacha/apython,myint-archive/bpython
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.10'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
Set the version name for the default branch to mercurial so we can tell when we run from the repository
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
<commit_before># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.10'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
<commit_msg>Set the version name for the default branch to mercurial so we can tell when we run from the repository<commit_after>
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.10'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
Set the version name for the default branch to mercurial so we can tell when we run from the repository# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
<commit_before># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.10'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
<commit_msg>Set the version name for the default branch to mercurial so we can tell when we run from the repository<commit_after># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
b2662ad0031d5ecbb322feb105e362f5d80f4392
|
favicon/templatetags/favtags.py
|
favicon/templatetags/favtags.py
|
from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
|
from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
|
Set proper link attribute 'sizes' in templatetag
|
Set proper link attribute 'sizes' in templatetag
There seems to be a typo in the templatetag generator on lines 26 and 30.
The attribute 'sizes=' is written as 'size ='. This breaks w3c tests as that attribute is not available in the link tag.
|
Python
|
mit
|
arteria/django-favicon-plus
|
from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
Set proper link attribute 'sizes' in templatetag
There seems to be a typo in the templatetag generator on lines 26 and 30.
The attribute 'sizes=' is written as 'size ='. This breaks w3c tests as that attribute is not available in the link tag.
|
from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
|
<commit_before>from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
<commit_msg>Set proper link attribute 'sizes' in templatetag
There seems to be a typo in the templatetag generator on lines 26 and 30.
The attribute 'sizes=' is written as 'size ='. This breaks w3c tests as that attribute is not available in the link tag.<commit_after>
|
from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
|
from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
Set proper link attribute 'sizes' in templatetag
There seems to be a typo in the templatetag generator on lines 26 and 30.
The attribute 'sizes=' is written as 'size ='. This breaks w3c tests as that attribute is not available in the link tag.from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
|
<commit_before>from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" size ="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
<commit_msg>Set proper link attribute 'sizes' in templatetag
There seems to be a typo in the templatetag generator on lines 26 and 30.
The attribute 'sizes=' is written as 'size ='. This breaks w3c tests as that attribute is not available in the link tag.<commit_after>from django import template
from django.utils.safestring import mark_safe
from favicon.models import Favicon, config
register = template.Library()
@register.simple_tag(takes_context=True)
def placeFavicon(context):
"""
Gets Favicon-URL for the Model.
Template Syntax:
{% placeFavicon %}
"""
fav = Favicon.objects.filter(isFavicon=True).first()
if not fav:
return '<!-- no favicon -->'
html = ''
for rel in config:
for size in sorted(config[rel], reverse=True):
n = fav.get_favicon(size=size, rel=rel)
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
n.rel, n.size, n.size, n.faviconImage.url)
default_fav = fav.get_favicon(size=32, rel='shortcut icon')
html += '<link rel="%s" sizes="%sx%s" href="%s"/>' % (
default_fav.rel, default_fav.size, default_fav.size, default_fav.faviconImage.url)
return mark_safe(html)
|
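As an aside on the favicon diff above: HTML's <link> element defines a 'sizes' attribute, while 'size' is not a valid attribute at all, which is why the W3C validator rejects the original output. A minimal self-contained sketch of the same tag-building logic, with a hypothetical URL template and size list standing in for the model's real values:

from html import escape

def favicon_links(sizes, url_template="/media/favicons/fav-{size}.png", rel="icon"):
    """Build <link> tags using the W3C-valid 'sizes' attribute."""
    tags = []
    for size in sorted(sizes, reverse=True):
        href = url_template.format(size=size)
        # 'sizes="WxH"', not 'size ="WxH"': only the former exists on <link>
        tags.append('<link rel="%s" sizes="%sx%s" href="%s"/>' % (rel, size, size, escape(href)))
    return "\n".join(tags)

print(favicon_links([16, 32, 64]))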
ad61975377b8d8733049054809350003be6dbff3
|
feder/virus_scan/engine/base.py
|
feder/virus_scan/engine/base.py
|
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}/{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
|
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
|
Remove duplicated slash in virus_scan
|
Remove duplicated slash in virus_scan
|
Python
|
mit
|
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
|
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}/{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
Remove duplicated slash in virus_scan
|
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
|
<commit_before>from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}/{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
<commit_msg>Remove duplicated slash in virus_scan<commit_after>
|
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
|
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}/{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
Remove duplicated slash in virus_scanfrom django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
|
<commit_before>from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}/{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
<commit_msg>Remove duplicated slash in virus_scan<commit_after>from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
|
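A note on the slash fix above: Django's reverse() already returns a path that starts with '/', so the extra '/' after the domain produced URLs like https://example.com//virus_scan/webhook/. A small stand-alone illustration of the pitfall, with a hypothetical reverse_path() standing in for django.urls.reverse():

def reverse_path(name):
    # stand-in for django.urls.reverse(); note the leading slash in the result
    routes = {"virus_scan:webhook": "/virus_scan/webhook/"}
    return routes[name]

def webhook_url(domain, token, buggy=False):
    path = reverse_path("virus_scan:webhook")
    template = "https://{}/{}?token={}" if buggy else "https://{}{}?token={}"
    return template.format(domain, path, token)

print(webhook_url("example.com", "abc", buggy=True))   # https://example.com//virus_scan/webhook/?token=abc
print(webhook_url("example.com", "abc", buggy=False))  # https://example.com/virus_scan/webhook/?token=abc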
67e2567b7a7d01e3225675376f189f551c42410b
|
modules/bitcoin.py
|
modules/bitcoin.py
|
"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "http://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
|
"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "https://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
|
Update mtgox api endpoint to use SSL
|
Update mtgox api endpoint to use SSL
|
Python
|
mit
|
billyvg/piebot
|
"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "http://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
Update mtgox api endpoint to use SSL
|
"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "https://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
|
<commit_before>"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "http://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
<commit_msg>Update mtgox api endpoint to use SSL<commit_after>
|
"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "https://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
|
"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "http://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
Update mtgox api endpoint to use SSL"""Bitcoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "https://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
|
<commit_before>"""Bitccoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "http://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
<commit_msg>Update mtgox api endpoint to use SSL<commit_after>"""Bitcoin ticker module for ppbot.
@package ppbot
Displays the current bitcoin pricing from mtgox
@syntax btc
"""
import requests
import string
import json
from xml.dom.minidom import parseString
from modules import *
class Bitcoin(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
self.url = "https://data.mtgox.com/api/1/BTCUSD/ticker"
def _register_events(self):
"""Register module commands."""
self.add_command('btc')
def btc(self, event):
"""Action to react/respond to user calls."""
data = {}
try:
result = self.lookup()['return']
data['high'] = result['high']['display_short']
data['last'] = result['last_local']['display_short']
data['low'] = result['low']['display_short']
data['volume'] = result['vol']['display_short']
message = "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % (data)
self.msg(event['target'], message)
except:
pass
def lookup(self):
"""Connects to google's secret finance API and parses the receiving json for the stock info."""
# make the parser, and send the xml to be parsed
result = requests.get(self.url)
return result.json()
|
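Beyond the http-to-https change, the btc handler above hides every failure behind a bare except. A hedged sketch of a more defensive lookup, using only documented requests features (a timeout and raise_for_status); the URL is a placeholder, since the MtGox API no longer exists:

import requests

TICKER_URL = "https://example.invalid/api/1/BTCUSD/ticker"  # placeholder endpoint

def lookup(url=TICKER_URL, timeout=5):
    """Fetch ticker JSON, surfacing network and HTTP errors instead of swallowing them."""
    response = requests.get(url, timeout=timeout)
    response.raise_for_status()
    return response.json()

def format_ticker(payload):
    result = payload["return"]
    return "Last: %(last)s - High/Low: (%(high)s/%(low)s) - Volume: %(volume)s" % {
        "last": result["last_local"]["display_short"],
        "high": result["high"]["display_short"],
        "low": result["low"]["display_short"],
        "volume": result["vol"]["display_short"],
    }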
7ffea5f365c7b21b43eee00646f560b04c8e17e0
|
molly/conf/urls.py
|
molly/conf/urls.py
|
from django.conf.urls.defaults import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
|
from django.core.urlresolvers import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
|
Make new URL tag forward compatible
|
Make new URL tag forward compatible
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
from django.conf.urls.defaults import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
Make new URL tag forward compatible
|
from django.core.urlresolvers import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
|
<commit_before>from django.conf.urls.defaults import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
<commit_msg>Make new URL tag forward compatible<commit_after>
|
from django.core.urlresolvers import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
|
from django.conf.urls.defaults import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
Make new URL tag forward compatiblefrom django.core.urlresolvers import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
|
<commit_before>from django.conf.urls.defaults import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
<commit_msg>Make new URL tag forward compatible<commit_after>from django.core.urlresolvers import RegexURLPattern
def url(pattern, name=None, extra={}):
def url_annotator(view):
view.pattern = RegexURLPattern(pattern, view, extra, name)
return view
return url_annotator
|
09d3f9167edf3230117db3b5369a10b4df774195
|
scripts/schedule.py
|
scripts/schedule.py
|
import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
|
import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] [--] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
|
Add [--] to usage statement
|
Add [--] to usage statement
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>
|
Python
|
mit
|
tchaikov/teuthology,michaelsevilla/teuthology,ceph/teuthology,yghannam/teuthology,SUSE/teuthology,caibo2014/teuthology,t-miyamae/teuthology,robbat2/teuthology,dmick/teuthology,ivotron/teuthology,zhouyuan/teuthology,ktdreyer/teuthology,dmick/teuthology,SUSE/teuthology,dreamhost/teuthology,yghannam/teuthology,tchaikov/teuthology,ivotron/teuthology,zhouyuan/teuthology,t-miyamae/teuthology,dmick/teuthology,michaelsevilla/teuthology,ceph/teuthology,robbat2/teuthology,caibo2014/teuthology,ktdreyer/teuthology,dreamhost/teuthology,SUSE/teuthology
|
import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
Add [--] to usage statement
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>
|
import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] [--] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
|
<commit_before>import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
<commit_msg>Add [--] to usage statement
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com><commit_after>
|
import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] [--] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
|
import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
Add [--] to usage statement
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] [--] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
|
<commit_before>import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
<commit_msg>Add [--] to usage statement
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com><commit_after>import docopt
import teuthology.misc
import teuthology.schedule
doc = """
usage: teuthology-schedule -h
teuthology-schedule [options] [--] <conf_file> [<conf_file> ...]
Schedule ceph integration tests
positional arguments:
<conf_file> Config file to read
optional arguments:
-h, --help Show this help message and exit
-v, --verbose Be more verbose
-n <name>, --name <name> Name of suite run the job is part of
-d <desc>, --description <desc> Job description
-o <owner>, --owner <owner> Job owner
-w <worker>, --worker <worker> Which worker to use (type of machine)
[default: plana]
-p <priority>, --priority <priority> Job priority (lower is sooner)
[default: 1000]
-N <num>, --num <num> Number of times to run/queue the job
[default: 1]
--last-in-suite Mark the last job in a suite so suite
post-processing can be run
[default: False]
--email <email> Where to send the results of a suite.
Only applies to the last job in a suite.
--timeout <timeout> How many seconds to wait for jobs to
finish before emailing results. Only
applies to the last job in a suite.
"""
def main():
args = docopt.docopt(doc)
teuthology.schedule.main(args)
|
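On the '[--]' added above: docopt treats a literal '--' on the command line as an end-of-options marker, so declaring it in the usage pattern lets config file names that begin with a dash pass through as positional arguments. A minimal sketch of that behaviour, assuming docopt 0.6.x is installed:

from docopt import docopt

usage = "usage: demo [-v] [--] <conf_file> [<conf_file> ...]"

# Without the '--' separator, '-weird.yaml' would be rejected as an unknown option.
args = docopt(usage, argv=["--", "-weird.yaml", "other.yaml"])
print(args["<conf_file>"])  # ['-weird.yaml', 'other.yaml']
print(args["-v"])           # False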
ec52fee0fbefaa8fe2df1f38aab000456fb44c45
|
website/admin.py
|
website/admin.py
|
from django.contrib import admin
from .models import Card, FaqQuestion, Banner, Rule
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
from django.contrib import admin
from django.contrib.admin import EmptyFieldListFilter
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Card, FaqQuestion, Banner, Rule
class WatchlistFilter(EmptyFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super().__init__(field, request, params, model, model_admin, field_path)
self.title = "vaktliste"
def choices(self, changelist):
for lookup, title in (
(None, 'Alle'),
('1', 'På vaktliste'),
('0', 'Ikke på vaktliste'),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
'display': title,
}
class CustomUserAdmin(UserAdmin):
list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
Add filter for users on watchlist
|
Add filter for users on watchlist
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
from django.contrib import admin
from .models import Card, FaqQuestion, Banner, Rule
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
Add filter for users on watchlist
|
from django.contrib import admin
from django.contrib.admin import EmptyFieldListFilter
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Card, FaqQuestion, Banner, Rule
class WatchlistFilter(EmptyFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super().__init__(field, request, params, model, model_admin, field_path)
self.title = "vaktliste"
def choices(self, changelist):
for lookup, title in (
(None, 'Alle'),
('1', 'På vaktliste'),
('0', 'Ikke på vaktliste'),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
'display': title,
}
class CustomUserAdmin(UserAdmin):
list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
<commit_before>from django.contrib import admin
from .models import Card, FaqQuestion, Banner, Rule
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
<commit_msg>Add filter for users on watchlist<commit_after>
|
from django.contrib import admin
from django.contrib.admin import EmptyFieldListFilter
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Card, FaqQuestion, Banner, Rule
class WatchlistFilter(EmptyFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super().__init__(field, request, params, model, model_admin, field_path)
self.title = "vaktliste"
def choices(self, changelist):
for lookup, title in (
(None, 'Alle'),
('1', 'På vaktliste'),
('0', 'Ikke på vaktliste'),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
'display': title,
}
class CustomUserAdmin(UserAdmin):
list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
from django.contrib import admin
from .models import Card, FaqQuestion, Banner, Rule
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
Add filter for users on watchlistfrom django.contrib import admin
from django.contrib.admin import EmptyFieldListFilter
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Card, FaqQuestion, Banner, Rule
class WatchlistFilter(EmptyFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super().__init__(field, request, params, model, model_admin, field_path)
self.title = "vaktliste"
def choices(self, changelist):
for lookup, title in (
(None, 'Alle'),
('1', 'På vaktliste'),
('0', 'Ikke på vaktliste'),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
'display': title,
}
class CustomUserAdmin(UserAdmin):
list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
|
<commit_before>from django.contrib import admin
from .models import Card, FaqQuestion, Banner, Rule
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
<commit_msg>Add filter for users on watchlist<commit_after>from django.contrib import admin
from django.contrib.admin import EmptyFieldListFilter
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from .models import Card, FaqQuestion, Banner, Rule
class WatchlistFilter(EmptyFieldListFilter):
def __init__(self, field, request, params, model, model_admin, field_path):
super().__init__(field, request, params, model, model_admin, field_path)
self.title = "vaktliste"
def choices(self, changelist):
for lookup, title in (
(None, 'Alle'),
('1', 'På vaktliste'),
('0', 'Ikke på vaktliste'),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}),
'display': title,
}
class CustomUserAdmin(UserAdmin):
list_filter = UserAdmin.list_filter + (("watches", WatchlistFilter),)
admin.site.unregister(User)
admin.site.register(User, CustomUserAdmin)
admin.site.register(Card)
admin.site.register(FaqQuestion)
admin.site.register(Rule)
admin.site.register(Banner)
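The filter in the record above keys off a reverse accessor named "watches" on User; the model that defines that relation is not part of the record. A minimal sketch of what it could look like, using a purely hypothetical Shift model:

# Sketch only: a hypothetical model giving User the reverse accessor
# "watches" that CustomUserAdmin.list_filter filters on.
from django.contrib.auth.models import User
from django.db import models

class Shift(models.Model):
    # Users watching this shift; each User gains a reverse "watches" accessor.
    watchers = models.ManyToManyField(User, related_name='watches')

With such a relation in place, the user changelist gains a "vaktliste" sidebar filter offering the three choices Alle, På vaktliste and Ikke på vaktliste defined in WatchlistFilter.choices().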
|
9293a72faf8cd41dc68bb1f2220e10459bbe09ff
|
pycom/oslo_i18n.py
|
pycom/oslo_i18n.py
|
# coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators, _, _C, _P, _LI, _LW, _LE, _LC = None
reset_i18n()
|
# coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None, lazy=True):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
if lazy:
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators = _ = _C = _P = _LI = _LW = _LE = _LC = None
reset_i18n()
|
Add an argument and fix an error.
|
Add an argument and fix an error.
|
Python
|
mit
|
xgfone/pycom,xgfone/xutils
|
# coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators, _, _C, _P, _LI, _LW, _LE, _LC = None
reset_i18n()
Add an argument and fix an error.
|
# coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None, lazy=True):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
if lazy:
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators = _ = _C = _P = _LI = _LW = _LE = _LC = None
reset_i18n()
|
<commit_before># coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators, _, _C, _P, _LI, _LW, _LE, _LC = None
reset_i18n()
<commit_msg>Add an argument and fix an error.<commit_after>
|
# coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None, lazy=True):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
if lazy:
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators = _ = _C = _P = _LI = _LW = _LE = _LC = None
reset_i18n()
|
# coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators, _, _C, _P, _LI, _LW, _LE, _LC = None
reset_i18n()
Add an argument and fix an error.# coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None, lazy=True):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
if lazy:
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators = _ = _C = _P = _LI = _LW = _LE = _LC = None
reset_i18n()
|
<commit_before># coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators, _, _C, _P, _LI, _LW, _LE, _LC = None
reset_i18n()
<commit_msg>Add an argument and fix an error.<commit_after># coding: utf-8
import oslo_i18n
def reset_i18n(domain="app", localedir=None, lazy=True):
global _translators, _, _C, _P, _LI, _LW, _LE, _LC
# Enable lazy translation
if lazy:
oslo_i18n.enable_lazy()
_translators = oslo_i18n.TranslatorFactory(domain=domain, localedir=localedir)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
_translators = _ = _C = _P = _LI = _LW = _LE = _LC = None
reset_i18n()
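A short usage sketch for the revised reset_i18n; the domain name and locale directory below are invented, and lazy=False merely skips the enable_lazy() call (it does not undo a previous one):

# Sketch: re-initialising the module-level translators for a different domain.
from pycom import oslo_i18n as i18n

i18n.reset_i18n(domain="myapp", localedir="/usr/share/locale", lazy=False)
greeting = i18n._("Hello, world")        # primary translator
warning = i18n._LW("Disk space is low")  # log-warning translator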
|
b43fd1f4451f11f63b8657594c089b309cb140dd
|
pyfr/ctypesutil.py
|
pyfr/ctypesutil.py
|
# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'Windows':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
|
# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'win32':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
|
Fix how we check for Windows in platform_libname.
|
Fix how we check for Windows in platform_libname.
|
Python
|
bsd-3-clause
|
BrianVermeire/PyFR
|
# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'Windows':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
Fix how we check for Windows in platform_libname.
|
# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'win32':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
|
<commit_before># -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'Windows':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
<commit_msg>Fix how we check for Windows in platform_libname.<commit_after>
|
# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'win32':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
|
# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'Windows':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
Fix how we check for Windows in platform_libname.# -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'win32':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
|
<commit_before># -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'Windows':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
<commit_msg>Fix how we check for Windows in platform_libname.<commit_after># -*- coding: utf-8 -*-
import ctypes
import ctypes.util
import os
import sys
def find_libc():
if sys.platform == 'win32':
return ctypes.util.find_msvcrt()
else:
return ctypes.util.find_library('c')
def load_library(name):
lname = platform_libname(name)
sdirs = platform_libdirs()
# First attempt to utilise the system search path
try:
return ctypes.CDLL(lname)
# Otherwise, if this fails then run our own search
except OSError:
for sd in sdirs:
try:
return ctypes.CDLL(os.path.abspath(os.path.join(sd, lname)))
except OSError:
pass
else:
raise OSError('Unable to load {0}'.format(name))
def platform_libname(name):
if sys.platform == 'darwin':
return 'lib{0}.dylib'.format(name)
elif sys.platform == 'win32':
return '{0}.dll'.format(name)
else:
return 'lib{0}.so'.format(name)
def platform_libdirs():
path = os.environ.get('PYFR_LIBRARY_PATH', '')
dirs = [d for d in path.split(':') if d]
# On Mac OS X append the default path used by MacPorts
if sys.platform == 'darwin':
return dirs + ['/opt/local/lib']
# Otherwise just return
else:
return dirs
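A brief sketch of how the corrected helpers resolve a shared library; the library name is hypothetical, and PYFR_LIBRARY_PATH is the variable platform_libdirs() actually reads:

# Sketch: locating a hypothetical "metis" shared library.
import os
from pyfr.ctypesutil import load_library, platform_libname

os.environ['PYFR_LIBRARY_PATH'] = '/opt/metis/lib:/usr/local/lib'

print(platform_libname('metis'))  # libmetis.so, libmetis.dylib or metis.dll
lib = load_library('metis')       # system search path first, then the dirs above
# symbols are then available as attributes of the returned ctypes.CDLL object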
|
82b65fee35960a1f47f92aac1d8031bb0b57b2e7
|
jsonapiquery/drivers/__init__.py
|
jsonapiquery/drivers/__init__.py
|
from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def parse(self, item):
"""Return a new, typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return type(item)(**init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
|
from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def init_type(self, type, **init_kwargs):
"""Initialize a new type.
:param type: Type of namedtuple.
:param init_kwargs: Type keyword arguments.
"""
return type(**init_kwargs)
def parse(self, item):
"""Return a new typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return self.init_type(type(item), **init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
|
Move type initialization to its own method
|
Move type initialization to its own method
|
Python
|
apache-2.0
|
caxiam/sqlalchemy-jsonapi-collections
|
from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def parse(self, item):
"""Return a new, typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return type(item)(**init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
Move type initialization to its own method
|
from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def init_type(self, type, **init_kwargs):
"""Initialize a new type.
:param type: Type of namedtuple.
:param init_kwargs: Type keyword arguments.
"""
return type(**init_kwargs)
def parse(self, item):
"""Return a new typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return self.init_type(type(item), **init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
|
<commit_before>from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def parse(self, item):
"""Return a new, typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return type(item)(**init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
<commit_msg>Move type initialization to its own method<commit_after>
|
from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def init_type(self, type, **init_kwargs):
"""Initialize a new type.
:param type: Type of namedtuple.
:param init_kwargs: Type keyword arguments.
"""
return type(**init_kwargs)
def parse(self, item):
"""Return a new typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return self.init_type(type(item), **init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
|
from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def parse(self, item):
"""Return a new, typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return type(item)(**init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
Move type initialization to its own methodfrom abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def init_type(self, type, **init_kwargs):
"""Initialize a new type.
:param type: Type of namedtuple.
:param init_kwargs: Type keyword arguments.
"""
return type(**init_kwargs)
def parse(self, item):
"""Return a new typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return self.init_type(type(item), **init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
|
<commit_before>from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def parse(self, item):
"""Return a new, typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return type(item)(**init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
<commit_msg>Move type initialization to its own method<commit_after>from abc import ABCMeta, abstractmethod
class DriverBase(metaclass=ABCMeta):
def __init__(self, obj):
self.obj = obj
def __repr__(self):
return '{}(type={})'.format(self.__class__.__name__, self.obj)
def init_type(self, type, **init_kwargs):
"""Initialize a new type.
:param type: Type of namedtuple.
:param init_kwargs: Type keyword arguments.
"""
return type(**init_kwargs)
def parse(self, item):
"""Return a new typed item instance."""
obj = self.obj
relationships = []
for relationship in item.relationships:
relationship = self.parse_relationship(relationship, obj)
relationships.append(relationship)
obj = relationship.type
init_kwargs = item._asdict()
init_kwargs['relationships'] = relationships
if hasattr(item, 'attribute'):
attribute = self.parse_attribute(item.attribute, obj)
init_kwargs['attribute'] = attribute
return self.init_type(type(item), **init_kwargs)
@abstractmethod
def parse_attribute(self, attribute, type):
return None
@abstractmethod
def parse_relationship(self, relationship, type):
return None
from .model import *
from .schema import *
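A minimal sketch of a concrete driver on top of the revised base class; the Filter namedtuple and the pass-through parsing are invented for illustration and do not reflect the package's real drivers:

# Sketch: a do-nothing driver exercising parse(), which now builds its
# result through the overridable init_type() hook.
from collections import namedtuple
from jsonapiquery.drivers import DriverBase

Filter = namedtuple('Filter', ['attribute', 'relationships'])

class EchoDriver(DriverBase):
    def parse_attribute(self, attribute, type):
        return attribute        # no schema/model lookup in this sketch

    def parse_relationship(self, relationship, type):
        return relationship     # likewise a pass-through

item = EchoDriver(obj=None).parse(Filter(attribute='name', relationships=[]))
print(item)                     # Filter(attribute='name', relationships=[])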
|
45c4f2455b453ee361cbb38ed1add996012b1c5e
|
datahelper.py
|
datahelper.py
|
"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
try:
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
finally:
return response
|
"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
return response
|
Fix bug where dirty_ndb was silently failing.
|
Fix bug where dirty_ndb was silently failing.
|
Python
|
apache-2.0
|
kkinder/GAEStarterKit,kkinder/GAEStarterKit,kkinder/GAEStarterKit
|
"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
try:
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
finally:
return response
Fix bug where dirty_ndb was silently failing.
|
"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
return response
|
<commit_before>"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
try:
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
finally:
return response
<commit_msg>Fix bug where dirty_ndb was silently failing.<commit_after>
|
"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
return response
|
"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
try:
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
finally:
return response
Fix bug where dirty_ndb was silently failing."""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
return response
|
<commit_before>"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
try:
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
finally:
return response
<commit_msg>Fix bug where dirty_ndb was silently failing.<commit_after>"""
Just some useful Flask stuff for ndb.
"""
from google.appengine.ext import ndb
from flask import g
from app import app
def put_later(*objs):
"""
Any ndb model instances passed to this method will be put after the flask request has been processed.
"""
for obj in objs:
if obj not in g.dirty_ndb:
g.dirty_ndb.append(obj)
@app.after_request
def store_ndb(response):
"""
Puts the contents of g.dirty_ndb
"""
if g.dirty_ndb:
ndb.put_multi(g.dirty_ndb)
g.dirty_ndb = []
return response
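A small sketch of the intended call pattern inside a request handler; the Account model and route are invented, and it assumes g.dirty_ndb is initialised to an empty list at the start of each request elsewhere in the app, as the module implies:

# Sketch: queueing entities so store_ndb() writes them once, after the view.
from google.appengine.ext import ndb
from app import app
from datahelper import put_later

class Account(ndb.Model):              # hypothetical model
    visits = ndb.IntegerProperty(default=0)

@app.route('/visit/<int:account_id>')
def visit(account_id):
    account = Account.get_by_id(account_id)
    account.visits += 1
    put_later(account)                 # persisted in one ndb.put_multi() call
    return 'ok'

The fix itself matters because a return inside a finally block swallows any exception raised by ndb.put_multi, so failed writes previously went unnoticed; without the try/finally they now propagate.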
|
545fef9216559d08f58f7e20082b36352203a8cf
|
python/decorators.py
|
python/decorators.py
|
def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
return cls
|
def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
return cls
def enterprise(cls):
enterprise_note = '''
.. note: This object is only available in the Enterprise edition of Binary Ninja.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = enterprise_note + cls.__doc__
else:
cls.__doc__ = enterprise_note
return cls
|
Add documentation decorator for Enterprise.
|
Add documentation decorator for Enterprise.
|
Python
|
mit
|
Vector35/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api
|
def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
	return clsAdd documentation decorator for Enterprise.
|
def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
return cls
def enterprise(cls):
enterprise_note = '''
.. note: This object is only available in the Enterprise edition of Binary Ninja.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = enterprise_note + cls.__doc__
else:
cls.__doc__ = enterprise_note
return cls
|
<commit_before>def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
	return cls<commit_msg>Add documentation decorator for Enterprise.<commit_after>
|
def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
return cls
def enterprise(cls):
enterprise_note = '''
.. note: This object is only available in the Enterprise edition of Binary Ninja.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = enterprise_note + cls.__doc__
else:
cls.__doc__ = enterprise_note
return cls
|
def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
	return clsAdd documentation decorator for Enterprise.def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
return cls
def enterprise(cls):
enterprise_note = '''
.. note: This object is only available in the Enterprise edition of Binary Ninja.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = enterprise_note + cls.__doc__
else:
cls.__doc__ = enterprise_note
return cls
|
<commit_before>def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
	return cls<commit_msg>Add documentation decorator for Enterprise.<commit_after>def passive(cls):
passive_note = '''
.. note:: This object is a "passive" object. Any changes you make to it will not be reflected in the core and vice-versa. If you wish to update a core version of this object you should use the appropriate API.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ += passive_note
else:
cls.__doc__ = passive_note
return cls
def deprecated(cls):
deprecated_note = '''
.. warning:: This object is deprecated. Please migrate code away from using this class or method.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = deprecated_note + cls.__doc__
else:
cls.__doc__ = deprecated_note
return cls
def enterprise(cls):
enterprise_note = '''
.. note: This object is only available in the Enterprise edition of Binary Ninja.
'''
if hasattr(cls, "__doc__") and cls.__doc__:
cls.__doc__ = enterprise_note + cls.__doc__
else:
cls.__doc__ = enterprise_note
return cls
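A tiny sketch of applying the new decorator; the class is invented and the import path is abbreviated to the bare module name:

# Sketch: @enterprise only prepends a note to __doc__, so any class works.
from decorators import enterprise

@enterprise
class RemoteSnapshot:
    """Hypothetical object representing a server-side snapshot."""

print(RemoteSnapshot.__doc__)   # enterprise note followed by the original docstring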
|
73e15928a8427eb5a6e4a886660b9493e50cd699
|
currencies/models.py
|
currencies/models.py
|
from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the default currency is unique
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
|
from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_base = models.BooleanField(_('base'), default=False,
help_text=_('Make this the base currency against which rates are calculated.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default user currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the base and default currencies are unique
if self.is_base:
Currency.objects.filter(is_base=True).update(is_base=False)
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
|
Add a Currency.is_base field (currently unused)
|
Add a Currency.is_base field (currently unused)
|
Python
|
bsd-3-clause
|
pathakamit88/django-currencies,panosl/django-currencies,ydaniv/django-currencies,mysociety/django-currencies,panosl/django-currencies,barseghyanartur/django-currencies,bashu/django-simple-currencies,pathakamit88/django-currencies,ydaniv/django-currencies,marcosalcazar/django-currencies,jmp0xf/django-currencies,racitup/django-currencies,mysociety/django-currencies,marcosalcazar/django-currencies,bashu/django-simple-currencies,racitup/django-currencies
|
from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the default currency is unique
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
Add a Currency.is_base field (currently unused)
|
from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_base = models.BooleanField(_('base'), default=False,
help_text=_('Make this the base currency against which rates are calculated.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default user currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the base and default currencies are unique
if self.is_base:
Currency.objects.filter(is_base=True).update(is_base=False)
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
|
<commit_before>from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the default currency is unique
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
<commit_msg>Add a Currency.is_base field (currently unused)<commit_after>
|
from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_base = models.BooleanField(_('base'), default=False,
help_text=_('Make this the base currency against which rates are calculated.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default user currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the base and default currencies are unique
if self.is_base:
Currency.objects.filter(is_base=True).update(is_base=False)
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
|
from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the default currency is unique
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
Add a Currency.is_base field (currently unused)from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_base = models.BooleanField(_('base'), default=False,
help_text=_('Make this the base currency against which rates are calculated.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default user currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the base and default currencies are unique
if self.is_base:
Currency.objects.filter(is_base=True).update(is_base=False)
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
|
<commit_before>from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the default currency is unique
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
<commit_msg>Add a Currency.is_base field (currently unused)<commit_after>from django.db import models
from django.utils.translation import gettext_lazy as _
class Currency(models.Model):
code = models.CharField(_('code'), max_length=3)
name = models.CharField(_('name'), max_length=35)
symbol = models.CharField(_('symbol'), max_length=1, blank=True)
factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4,
help_text=_('Specifies the difference of the currency to default one.'))
is_active = models.BooleanField(_('active'), default=True,
help_text=_('The currency will be available.'))
is_base = models.BooleanField(_('base'), default=False,
help_text=_('Make this the base currency against which rates are calculated.'))
is_default = models.BooleanField(_('default'), default=False,
help_text=_('Make this the default user currency.'))
class Meta:
verbose_name = _('currency')
verbose_name_plural = _('currencies')
def __unicode__(self):
return self.code
def save(self, **kwargs):
# Make sure the base and default currencies are unique
if self.is_base:
Currency.objects.filter(is_base=True).update(is_base=False)
if self.is_default:
Currency.objects.filter(is_default=True).update(is_default=False)
super(Currency, self).save(**kwargs)
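A short sketch of the uniqueness behaviour save() now enforces for both flags; the codes and factors are invented:

# Sketch: promoting a second currency to base demotes the previous one.
from currencies.models import Currency

usd = Currency.objects.create(code='USD', name='US Dollar', factor='1.0',
                              is_base=True, is_default=True)
eur = Currency(code='EUR', name='Euro', factor='0.9', is_base=True)
eur.save()            # clears is_base on USD before saving EUR

usd.refresh_from_db()
assert not usd.is_base and usd.is_default   # default flag is untouched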
|
e7ecb09238d35974f90fdc2d974ebbebecc5ef17
|
quijy/misc.py
|
quijy/misc.py
|
"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.jet(colors[i], 1), **kwargs)
return axes
|
"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.viridis(colors[i], 1), **kwargs)
return axes
|
Change default colormap in ezplot
|
Change default colormap in ezplot
From 'jet' to 'viridis'
|
Python
|
mit
|
jcmgray/quijy
|
"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.jet(colors[i], 1), **kwargs)
return axes
Change default colormap in ezplot
From 'jet' to 'viridis'
|
"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.viridis(colors[i], 1), **kwargs)
return axes
|
<commit_before>"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.jet(colors[i], 1), **kwargs)
return axes
<commit_msg>Change default colormap in ezplot
From 'jet' to 'viridis'<commit_after>
|
"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.viridis(colors[i], 1), **kwargs)
return axes
|
"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.jet(colors[i], 1), **kwargs)
return axes
Change default colormap in ezplot
From 'jet' to 'viridis'"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.viridis(colors[i], 1), **kwargs)
return axes
|
<commit_before>"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.jet(colors[i], 1), **kwargs)
return axes
<commit_msg>Change default colormap in ezplot
From 'jet' to 'viridis'<commit_after>"""
Misc. functions not quantum related.
"""
import matplotlib.pyplot as plt
from matplotlib import cm
from numpy import (atleast_2d, squeeze, array, shape, argwhere, linspace)
def ezplot(x, y_i, fignum=1, xlog=False, ylog=False, **kwargs):
"""
Function for automatically plotting multiple sets of data
"""
# TODO colormap data and legend
y_i = atleast_2d(squeeze(y_i))
dimsy = array(shape(y_i))
xaxis = argwhere(len(x) == dimsy)[0] # 0 or 1
fig = plt.figure(fignum, figsize=(8, 6), dpi=100)
axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
colors = linspace(0, 1, dimsy[1 - xaxis])
for i in range(dimsy[xaxis - 1]):
if xaxis:
y = y_i[i, :]
else:
y = y_i[:, i]
if xlog:
axes.set_xscale("log")
if ylog:
axes.set_yscale("log")
axes.plot(x, y, '.-', c=cm.viridis(colors[i], 1), **kwargs)
return axes
|
d6a020778343567e671a671ca9fd5b40eed1ee9c
|
rename-pdf.py
|
rename-pdf.py
|
#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
print file_name_dict
|
#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
# Now rename the extra files to the original name
for value in file_name_dict[key]:
if len(value) > 1:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
out_path = os.path.join(DATA_DIRECTORY, str(key))
os.rename(path, out_path)
|
Add renaming file to correct file name
|
Add renaming file to correct file name
|
Python
|
apache-2.0
|
UO-SPUR/misc
|
#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
print file_name_dictAdd renaming file to correct file name
|
#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
# Now rename the extra files to the original name
for value in file_name_dict[key]:
if len(value) > 1:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
out_path = os.path.join(DATA_DIRECTORY, str(key))
os.rename(path, out_path)
|
<commit_before>#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
print file_name_dict<commit_msg>Add renaming file to correct file name<commit_after>
|
#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
# Now rename the extra files to the original name
for value in file_name_dict[key]:
if len(value) > 1:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
out_path = os.path.join(DATA_DIRECTORY, str(key))
os.rename(path, out_path)
|
#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
print file_name_dictAdd renaming file to correct file name#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
# Now rename the extra files to the original name
for value in file_name_dict[key]:
if len(value) > 1:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
out_path = os.path.join(DATA_DIRECTORY, str(key))
os.rename(path, out_path)
|
<commit_before>#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
print file_name_dict<commit_msg>Add renaming file to correct file name<commit_after>#!/usr/bin/env python
__author__ = 'Jacob Bieker'
import os
DATA_DIRECTORY = os.path.join("test_file")
DATA = os.listdir(DATA_DIRECTORY)
file_name_dict = {}
for file_name in DATA:
split_name = file_name.split("_")
print split_name
file_name_dict.setdefault(split_name[0], [])
# Name has the extra _NUM extension
if len(split_name) > 1:
file_name_dict[split_name[0]].append(split_name[1])
else:
file_name_dict[split_name[0]].append(0)
for key in file_name_dict:
if len(file_name_dict[key]) == 1:
continue
else:
max = 0
for value in file_name_dict[key]:
if int(value) > max:
max = value
elif int(value) == 0:
path = os.path.join(DATA_DIRECTORY, str(key))
os.remove(path)
else:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
os.remove(path)
# Now rename the extra files to the original name
for value in file_name_dict[key]:
if len(value) > 1:
path = os.path.join(DATA_DIRECTORY, str(key) + "_" + str(value))
out_path = os.path.join(DATA_DIRECTORY, str(key))
os.rename(path, out_path)
|
512e6aac47e1fc73837a16b24024081e1407220b
|
kolla/common/task.py
|
kolla/common/task.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@abc.abstractproperty
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@property
@abc.abstractmethod
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
|
Replace abc.abstractproperty with property and abc.abstractmethod
|
Replace abc.abstractproperty with property and abc.abstractmethod
Replace abc.abstractproperty with property and abc.abstractmethod,
as abc.abstractproperty has been deprecated since python3.3[1]
[1]https://docs.python.org/3.8/whatsnew/3.3.html?highlight=deprecated#abc
Change-Id: Ibb048b879fa58b5e144ae228628b3ffaeae65bbb
|
Python
|
apache-2.0
|
openstack/kolla,openstack/kolla
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@abc.abstractproperty
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
Replace abc.abstractproperty with property and abc.abstractmethod
Replace abc.abstractproperty with property and abc.abstractmethod,
as abc.abstractproperty has been deprecated since python3.3[1]
[1]https://docs.python.org/3.8/whatsnew/3.3.html?highlight=deprecated#abc
Change-Id: Ibb048b879fa58b5e144ae228628b3ffaeae65bbb
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@property
@abc.abstractmethod
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@abc.abstractproperty
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
<commit_msg>Replace abc.abstractproperty with property and abc.abstractmethod
Replace abc.abstractproperty with property and abc.abstractmethod,
as abc.abstractproperty has been deprecated since python3.3[1]
[1]https://docs.python.org/3.8/whatsnew/3.3.html?highlight=deprecated#abc
Change-Id: Ibb048b879fa58b5e144ae228628b3ffaeae65bbb<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@property
@abc.abstractmethod
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@abc.abstractproperty
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
Replace abc.abstractproperty with property and abc.abstractmethod
Replace abc.abstractproperty with property and abc.abstractmethod,
as abc.abstractproperty has been deprecated since python3.3[1]
[1]https://docs.python.org/3.8/whatsnew/3.3.html?highlight=deprecated#abc
Change-Id: Ibb048b879fa58b5e144ae228628b3ffaeae65bbb# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@property
@abc.abstractmethod
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@abc.abstractproperty
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
<commit_msg>Replace abc.abstractproperty with property and abc.abstractmethod
Replace abc.abstractproperty with property and abc.abstractmethod,
as abc.abstractproperty has been deprecated since python3.3[1]
[1]https://docs.python.org/3.8/whatsnew/3.3.html?highlight=deprecated#abc
Change-Id: Ibb048b879fa58b5e144ae228628b3ffaeae65bbb<commit_after># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
class Task(object, metaclass=abc.ABCMeta):
def __init__(self):
self.success = False
@property
@abc.abstractmethod
def name(self):
pass
def reset(self):
self.success = False
@property
def followups(self):
return []
@abc.abstractmethod
def run(self):
pass
|
f1a2991ed8ff463255ad6a254fe049ffd1cbc46e
|
workshopvenues/venues/models.py
|
workshopvenues/venues/models.py
|
from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
def __unicode__(self):
return self.name
|
from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
style = models.CharField(max_length=200, blank=True)
twitter = models.CharField(max_length=200, blank=True)
phone = models.CharField(max_length=30, blank=True)
contact = models.CharField(max_length=50, blank=True)
contact_email = models.CharField(max_length=50, blank=True)
contact_twitter = models.CharField(max_length=200, blank=True)
cost = models.CharField(max_length=200, blank=True)
capacity = models.CharField(max_length=200, blank=True)
def __unicode__(self):
return self.name
|
Add migration for new fields in Venue
|
Add migration for new fields in Venue
|
Python
|
bsd-3-clause
|
andreagrandi/workshopvenues
|
from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
def __unicode__(self):
return self.name
Add migration for new fields in Venue
|
from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
style = models.CharField(max_length=200, blank=True)
twitter = models.CharField(max_length=200, blank=True)
phone = models.CharField(max_length=30, blank=True)
contact = models.CharField(max_length=50, blank=True)
contact_email = models.CharField(max_length=50, blank=True)
contact_twitter = models.CharField(max_length=200, blank=True)
cost = models.CharField(max_length=200, blank=True)
capacity = models.CharField(max_length=200, blank=True)
def __unicode__(self):
return self.name
|
<commit_before>from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
def __unicode__(self):
return self.name
<commit_msg>Add migration for new fields in Venue<commit_after>
|
from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
style = models.CharField(max_length=200, blank=True)
twitter = models.CharField(max_length=200, blank=True)
phone = models.CharField(max_length=30, blank=True)
contact = models.CharField(max_length=50, blank=True)
contact_email = models.CharField(max_length=50, blank=True)
contact_twitter = models.CharField(max_length=200, blank=True)
cost = models.CharField(max_length=200, blank=True)
capacity = models.CharField(max_length=200, blank=True)
def __unicode__(self):
return self.name
|
from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
def __unicode__(self):
return self.name
Add migration for new fields in Venuefrom django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
style = models.CharField(max_length=200, blank=True)
twitter = models.CharField(max_length=200, blank=True)
phone = models.CharField(max_length=30, blank=True)
contact = models.CharField(max_length=50, blank=True)
contact_email = models.CharField(max_length=50, blank=True)
contact_twitter = models.CharField(max_length=200, blank=True)
cost = models.CharField(max_length=200, blank=True)
capacity = models.CharField(max_length=200, blank=True)
def __unicode__(self):
return self.name
|
<commit_before>from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
def __unicode__(self):
return self.name
<commit_msg>Add migration for new fields in Venue<commit_after>from django.db import models
class Facility(models.Model):
name = models.CharField(max_length=30)
def __unicode__(self):
return self.name
class Address(models.Model):
street = models.CharField(max_length=200)
town = models.CharField(max_length=30)
postcode = models.CharField(max_length=10)
country = models.CharField(max_length=30, blank=True)
class Venue(models.Model):
name = models.CharField(max_length=30)
website = models.CharField(max_length=50)
address = models.ForeignKey(Address)
facilities = models.ManyToManyField(Facility)
style = models.CharField(max_length=200, blank=True)
twitter = models.CharField(max_length=200, blank=True)
phone = models.CharField(max_length=30, blank=True)
contact = models.CharField(max_length=50, blank=True)
contact_email = models.CharField(max_length=50, blank=True)
contact_twitter = models.CharField(max_length=200, blank=True)
cost = models.CharField(max_length=200, blank=True)
capacity = models.CharField(max_length=200, blank=True)
def __unicode__(self):
return self.name
|
6e6993f95a2e99830dd697a83a20353bcded6102
|
makerbase/__init__.py
|
makerbase/__init__.py
|
from flask import Flask
app = Flask(__name__)
import makerbase.tags
import makerbase.views
|
from flask import Flask
app = Flask(__name__)
app.debug_log_format = '%(asctime)-8s %(levelname)-8s %(name)-15s %(message)s'
import makerbase.tags
import makerbase.views
|
Use less stupid debug log format
|
Use less stupid debug log format
|
Python
|
mit
|
markpasc/makerbase,markpasc/makerbase
|
from flask import Flask
app = Flask(__name__)
import makerbase.tags
import makerbase.views
Use less stupid debug log format
|
from flask import Flask
app = Flask(__name__)
app.debug_log_format = '%(asctime)-8s %(levelname)-8s %(name)-15s %(message)s'
import makerbase.tags
import makerbase.views
|
<commit_before>from flask import Flask
app = Flask(__name__)
import makerbase.tags
import makerbase.views
<commit_msg>Use less stupid debug log format<commit_after>
|
from flask import Flask
app = Flask(__name__)
app.debug_log_format = '%(asctime)-8s %(levelname)-8s %(name)-15s %(message)s'
import makerbase.tags
import makerbase.views
|
from flask import Flask
app = Flask(__name__)
import makerbase.tags
import makerbase.views
Use less stupid debug log formatfrom flask import Flask
app = Flask(__name__)
app.debug_log_format = '%(asctime)-8s %(levelname)-8s %(name)-15s %(message)s'
import makerbase.tags
import makerbase.views
|
<commit_before>from flask import Flask
app = Flask(__name__)
import makerbase.tags
import makerbase.views
<commit_msg>Use less stupid debug log format<commit_after>from flask import Flask
app = Flask(__name__)
app.debug_log_format = '%(asctime)-8s %(levelname)-8s %(name)-15s %(message)s'
import makerbase.tags
import makerbase.views
|
fae33cf7d42559384deb7a9949f47b0881b0a29b
|
Cython/Tests/TestCythonUtils.py
|
Cython/Tests/TestCythonUtils.py
|
import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
|
import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
|
Remove accidentally duplicated test code.
|
Remove accidentally duplicated test code.
|
Python
|
apache-2.0
|
da-woods/cython,scoder/cython,da-woods/cython,cython/cython,scoder/cython,scoder/cython,da-woods/cython,cython/cython,scoder/cython,cython/cython,da-woods/cython,cython/cython
|
import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
Remove accidentally duplicated test code.
|
import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
|
<commit_before>import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
<commit_msg>Remove accidentally duplicated test code.<commit_after>
|
import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
|
import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
Remove accidentally duplicated test code.import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
|
<commit_before>import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
<commit_msg>Remove accidentally duplicated test code.<commit_after>import unittest
from ..Utils import build_hex_version
class TestCythonUtils(unittest.TestCase):
def test_build_hex_version(self):
self.assertEqual('0x001D00A1', build_hex_version('0.29a1'))
self.assertEqual('0x001D03C4', build_hex_version('0.29.3rc4'))
self.assertEqual('0x001D00F0', build_hex_version('0.29'))
self.assertEqual('0x040000F0', build_hex_version('4.0'))
|
3417c9260e5718c76978af8c276bea6b0b1cf126
|
oj_helper/__init__.py
|
oj_helper/__init__.py
|
import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
|
import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s',
level=logging.WARNING)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
|
Set logging level to WARNING for incoming release
|
Set logging level to WARNING for incoming release
|
Python
|
mit
|
ThomasLee969/oj-helper
|
import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
Set logging level to WARNING for incoming release
|
import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s',
level=logging.WARNING)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
|
<commit_before>import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
<commit_msg>Set logging level to WARNING for incoming release<commit_after>
|
import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s',
level=logging.WARNING)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
|
import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
Set logging level to WARNING for incoming releaseimport json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s',
level=logging.WARNING)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
|
<commit_before>import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
<commit_msg>Set logging level to WARNING for incoming release<commit_after>import json
import logging
import re
import requests
__all__ = ['config', 'session', 'submit', 'SubmitInfo', 'username']
logging.basicConfig(format='%(asctime)s %(levelname)s:%(name)s:%(message)s',
level=logging.WARNING)
logger = logging.getLogger(__name__)
# Import config
config = json.load(open('config.json'))
logger.debug('Configuration loaded')
# Create session
session = requests.Session()
_r = session.get(config['profile_url'], cookies=config['cookies'])
logger.debug('Profile page got')
_m = re.search(r'<h2>(\w+)\b', _r.text)
username = _m.group(1)
logger.debug('User name read: %s', username)
logger.info('Prepare works done')
from .submit import submit, SubmitInfo
|
c1de1a5406de7e36b3a36f5591aa16f315b1e368
|
opps/images/models.py
|
opps/images/models.py
|
# -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
|
# -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.models import TaggedItemBase
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class TaggedImage(TaggedItemBase):
"""Tag for images """
content_object = models.ForeignKey('images.Image')
pass
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True, through=TaggedImage)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
|
Create TaggedImage, unique marker for image
|
Create TaggedImage, unique marker for image
|
Python
|
mit
|
YACOWS/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,opps/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps
|
# -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
Create TaggedImage, unique marker for image
|
# -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.models import TaggedItemBase
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class TaggedImage(TaggedItemBase):
"""Tag for images """
content_object = models.ForeignKey('images.Image')
pass
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True, through=TaggedImage)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
|
<commit_before># -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
<commit_msg>Create TaggedImage, unique marker for image<commit_after>
|
# -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.models import TaggedItemBase
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class TaggedImage(TaggedItemBase):
"""Tag for images """
content_object = models.ForeignKey('images.Image')
pass
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True, through=TaggedImage)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
|
# -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
Create TaggedImage, unique marker for image# -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.models import TaggedItemBase
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class TaggedImage(TaggedItemBase):
"""Tag for images """
content_object = models.ForeignKey('images.Image')
pass
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True, through=TaggedImage)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
|
<commit_before># -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
<commit_msg>Create TaggedImage, unique marker for image<commit_after># -*- coding: utf-8 -*-
import uuid
import os
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from taggit.models import TaggedItemBase
from taggit.managers import TaggableManager
from opps.core.models import Publishable
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "{0}-{1}.{2}".format(uuid.uuid4(), instance.slug, ext)
d = datetime.now()
folder = "images/{0}".format(d.strftime("%Y/%m/%d/"))
return os.path.join(folder, filename)
class TaggedImage(TaggedItemBase):
"""Tag for images """
content_object = models.ForeignKey('images.Image')
pass
class Image(Publishable):
title = models.CharField(_(u"Title"), max_length=140, db_index=True)
slug = models.SlugField(_(u"Slug"), max_length=150, blank=True,
db_index=True)
image = models.ImageField(upload_to=get_file_path)
description = models.TextField(_(u"Description"), null=True, blank=True)
tags = TaggableManager(blank=True, through=TaggedImage)
source = models.ForeignKey('sources.Source', null=True, blank=True)
def __unicode__(self):
return u"{0}-{1}".format(self.id, self.slug)
def get_absolute_url(self):
if self.date_available <= timezone.now() and self.published:
return self.image.url
return u""
|
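A minimal usage sketch for the through-model tagging introduced in this record; the import path and slug are assumptions based on the app layout, and it presumes a configured Django project with django-taggit installed:
from opps.images.models import Image, TaggedImage   # import path assumed from the app layout

img = Image.objects.get(slug="sunset")                     # hypothetical existing record
img.tags.add("nature", "evening")                          # rows are stored via the TaggedImage through model
by_tag = Image.objects.filter(tags__name__in=["nature"])   # filter images through the taggit manager
links = TaggedImage.objects.filter(tag__name="nature")     # or inspect the through rows directly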
143ad06a39a4ee971f3f65e58cf17fbf8ebc81cd
|
app/decorators.py
|
app/decorators.py
|
# -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated and not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
|
# -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated or not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
|
Abort when user is not authenticated or not an admin
|
Abort when user is not authenticated or not an admin
|
Python
|
mit
|
0xfoo/flask-todolist,rtzll/flask-todolist,polyfunc/flask-todolist,polyfunc/flask-todolist,rtzll/flask-todolist,polyfunc/flask-todolist,0xfoo/flask-todolist,0xfoo/flask-todolist,rtzll/flask-todolist,rtzll/flask-todolist
|
# -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated and not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
Abort when user is not authenticated or not an admin
|
# -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated or not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
|
<commit_before># -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated and not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
<commit_msg>Abort when user is not authenticated or not an admin<commit_after>
|
# -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated or not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
|
# -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated and not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
Abort when user is not authenticated or not an admin# -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated or not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
|
<commit_before># -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated and not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
<commit_msg>Abort when user is not authenticated or not an admin<commit_after># -*- coding: utf-8 -*-
from functools import wraps
from flask import abort
def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from flask_login import current_user
if not current_user.is_authenticated or not current_user.is_admin:
abort(403)
return f(*args, **kwargs)
return decorated_function
|
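A short sketch of the corrected guard in use: not (authenticated and admin) is equivalent to (not authenticated) or (not admin), so the `or` form rejects anyone who fails either check. The route and import path are illustrative and assume flask_login is initialised elsewhere:
from flask import Flask
from app.decorators import admin_required

app = Flask(__name__)

@app.route('/admin')
@admin_required            # applied beneath the route decorator so the guard wraps the view
def admin_dashboard():
    return 'admin area'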
d91d6951c146ba4611d1b3869cbc08d396facd54
|
collections/set.py
|
collections/set.py
|
# Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)
|
# Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)
# Set can also contain any data types, even tuples are also allowed.
# Add function .add()
unique_passengers.add("Robin")
# .update() function works only for iterable objects
unique_passengers.update(["Rahul", "Robin"])
unique_passengers.update({"Kanvi", "Beatriz"})
# Both the discard() and remove() functions take a single value as an argument and
# remove that value from the set. If that value is not present, discard() does nothing,
# but remove() will raise a KeyError exception.
unique_passengers.discard("Annie")
unique_passengers.remove("George")
a = {2, 4, 5, 9}
b = {2, 4, 11, 12}
a.union(b) # Values which exist in a or b
a.intersection(b) # Values which exist in a and b
a.difference(b) # Values which exist in a but not in b
|
Set with add, update, discard, remove, union, intersection, difference
|
Set with add, update, discard, remove, union, intersection, difference
|
Python
|
mit
|
pk-python/basics
|
# Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)Set with add, update, discard, remove, union, intersection, difference
|
# Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)
# Set can also contain any data types, even tuples are also allowed.
# Add function .add()
unique_passengers.add("Robin")
# .update() function works only for iterable objects
unique_passengers.update(["Rahul", "Robin"])
unique_passengers.update({"Kanvi", "Beatriz"})
# Both the discard() and remove() functions take a single value as an argument and
# remove that value from the set. If that value is not present, discard() does nothing,
# but remove() will raise a KeyError exception.
unique_passengers.discard("Annie")
unique_passengers.remove("George")
a = {2, 4, 5, 9}
b = {2, 4, 11, 12}
a.union(b) # Values which exist in a or b
a.intersection(b) # Values which exist in a and b
a.difference(b) # Values which exist in a but not in b
|
<commit_before># Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)<commit_msg>Set with add, update, discard, remove, union, intersection, difference<commit_after>
|
# Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)
# Set can also contain any data types, even tuples are also allowed.
# Add function .add()
unique_passengers.add("Robin")
# .update() function works only for iterable objects
unique_passengers.update(["Rahul", "Robin"])
unique_passengers.update({"Kanvi", "Beatriz"})
# Both the discard() and remove() functions take a single value as an argument and
# remove that value from the set. If that value is not present, discard() does nothing,
# but remove() will raise a KeyError exception.
unique_passengers.discard("Annie")
unique_passengers.remove("George")
a = {2, 4, 5, 9}
b = {2, 4, 11, 12}
a.union(b) # Values which exist in a or b
a.intersection(b) # Values which exist in a and b
a.difference(b) # Values which exist in a but not in b
|
# Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)Set with add, update, discard, remove, union, intersection, difference# Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)
# Set can also contain any data types, even tuples are also allowed.
# Add function .add()
unique_passengers.add("Robin")
# .update() function works only for iterable objects
unique_passengers.update(["Rahul", "Robin"])
unique_passengers.update({"Kanvi", "Beatriz"})
# Both the discard() and remove() functions take a single value as an argument and
# remove that value from the set. If that value is not present, discard() does nothing,
# but remove() will raise a KeyError exception.
unique_passengers.discard("Annie")
unique_passengers.remove("George")
a = {2, 4, 5, 9}
b = {2, 4, 11, 12}
a.union(b) # Values which exist in a or b
a.intersection(b) # Values which exist in a and b
a.difference(b) # Values which exist in a but not in b
|
<commit_before># Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)<commit_msg>Set with add, update, discard, remove, union, intersection, difference<commit_after># Set
#Removes the duplicates from the given group of values to create the set.
flight_set = {500,520,600,345,520,634,600,500,200,200}
print("Flight Set : ", flight_set)
# Converting List into Set
passengers_list=["George", "Annie", "Jack", "Annie", "Henry", "Helen", "Maria", "George", "Jack", "Remo"]
unique_passengers=set(passengers_list)# set function - removes the duplicates from the list and returns a set
print("Unique Passengers : ", unique_passengers)
# Set can also contain any data types, even tuples are also allowed.
# Add function .add()
unique_passengers.add("Robin")
# .update() function works only for iterable objects
unique_passengers.update(["Rahul", "Robin"])
unique_passengers.update({"Kanvi", "Beatriz"})
# Both the discard() and remove() functions take a single value as an argument and
# remove that value from the set. If that value is not present, discard() does nothing,
# but remove() will raise a KeyError exception.
unique_passengers.discard("Annie")
unique_passengers.remove("George")
a = {2, 4, 5, 9}
b = {2, 4, 11, 12}
a.union(b) # Values which exist in a or b
a.intersection(b) # Values which exist in a and b
a.difference(b) # Values which exist in a but not in b
|
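The three method calls at the end of this snippet also have operator spellings, which give the same results when both operands are sets:
a = {2, 4, 5, 9}
b = {2, 4, 11, 12}
print(a | b)   # union                -> {2, 4, 5, 9, 11, 12}
print(a & b)   # intersection         -> {2, 4}
print(a - b)   # difference           -> {5, 9}
print(a ^ b)   # symmetric difference -> {5, 9, 11, 12}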
51caae36a10cf5616982c78506c5dcec593259a3
|
test_suite.py
|
test_suite.py
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = [
'test',
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command(*apps)
|
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = sys.argv[1:]
if not apps:
apps = [
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command('test', *apps)
|
Allow apps to be specified from the command line
|
Allow apps to be specified from the command line
|
Python
|
bsd-2-clause
|
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = [
'test',
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command(*apps)
Allow apps to be specified from the command line
|
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = sys.argv[1:]
if not apps:
apps = [
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command('test', *apps)
|
<commit_before>import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = [
'test',
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command(*apps)
<commit_msg>Allow apps to be specified from the command line<commit_after>
|
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = sys.argv[1:]
if not apps:
apps = [
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command('test', *apps)
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = [
'test',
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command(*apps)
Allow apps to be specified from the command lineimport os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = sys.argv[1:]
if not apps:
apps = [
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command('test', *apps)
|
<commit_before>import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = [
'test',
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command(*apps)
<commit_msg>Allow apps to be specified from the command line<commit_after>import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
from django.core import management
apps = sys.argv[1:]
if not apps:
apps = [
'core',
'exporting',
'formatters',
'lexicon',
'events',
'history',
'models',
'query',
'sets',
'stats',
'search',
'subcommands',
'validation',
]
management.call_command('test', *apps)
|
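With the argv fallback above, running the script with no arguments still tests the full list, while naming apps narrows the run; the `or` idiom below is an equivalent way to express the same fallback (commands and app names are illustrative):
import sys

# python test_suite.py              -> runs the default app list
# python test_suite.py core query   -> runs only 'core' and 'query'
apps = sys.argv[1:] or ['core', 'query', 'stats']   # an empty slice falls back to the default list
print(apps)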
7e29758c43e6448d06f96d1d79777364560a7393
|
bugzilla/bugsy.py
|
bugzilla/bugsy.py
|
import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
|
import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
if not result.get('error', True):
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
|
Check if the result from Bugzilla has an error when trying to get a token
|
Check if the result from Bugzilla has an error when trying to get a token
|
Python
|
apache-2.0
|
parkouss/Bugsy,AutomatedTester/Bugsy,indygreg/Bugsy
|
import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
Check if the result from Bugzilla has an error when trying to get a token
|
import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
if not result.get('error', True):
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
|
<commit_before>import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
<commit_msg>Check if the result from Bugzilla has an error when trying to get a token<commit_after>
|
import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
if not result.get('error', True):
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
|
import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
Check if the result from Bugzilla has an error when trying to get a tokenimport requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
if not result.get('error', True):
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
|
<commit_before>import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
<commit_msg>Check if the result from Bugzilla has an error when trying to get a token<commit_after>import requests
from bug import Bug
class BugsyException(Exception):
"""If trying to do something to a Bug this will be thrown"""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "Message: %s" % self.msg
class Bugsy(object):
"""docstring for Bugsy"""
def __init__(self, username=None, password=None, bugzilla_url='https://bugzilla.mozilla.org/rest'):
self.username = username
self.password = password
self.token = None
self.bugzilla_url = bugzilla_url
if self.username and self.password:
result = requests.get(bugzilla_url + '/login?login=%s&password=%s' % (self.username, self.password)).json()
if not result.get('error', True):
self.token = result['token']
def get(self, bug_number):
bug = requests.get(self.bugzilla_url + "/bug/%s" % bug_number).json()
return Bug(**bug['bugs'][0])
def put(self, bug):
if not self.username or not self.password:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting to Bugzilla")
|
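A small illustration of the added guard, using made-up payloads rather than real Bugzilla responses; note that the default of True in result.get('error', True) means a payload without an 'error' key is also treated as a failed login:
ok = {'error': False, 'token': 'abc123'}
failed = {'error': True, 'message': 'invalid credentials'}
missing = {}   # no 'error' key at all

for result in (ok, failed, missing):
    if not result.get('error', True):
        print('token:', result['token'])
    else:
        print('no token for', result)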
6776041c66a17fab0ad81799b44c3722e202dddf
|
builder/states.py
|
builder/states.py
|
# State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_START + 1
|
# State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_SH_START + 1
|
Fix old variable STACK_START name (not used anymore)
|
Fix old variable STACK_START name (not used anymore)
|
Python
|
apache-2.0
|
harlowja/multi-devstack,harlowja/multi-devstack
|
# State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_START + 1
Fix old variable STACK_START name (not used anymore)
|
# State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_SH_START + 1
|
<commit_before># State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_START + 1
<commit_msg>Fix old variable STACK_START name (not used anymore)<commit_after>
|
# State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_SH_START + 1
|
# State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_START + 1
Fix old variable STACK_START name (not used anymore)# State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_SH_START + 1
|
<commit_before># State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_START + 1
<commit_msg>Fix old variable STACK_START name (not used anymore)<commit_after># State identifiers (begin and end).
NO_STATE = -1
BIND_START = 0
BIND_END = BIND_START + 1
INTER_SSH_START = 10
INTER_SSH_END = INTER_SSH_START + 1
GIT_SETUP_START = 20
GIT_SETUP_END = GIT_SETUP_START + 1
UPLOAD_REPO_START = 30
UPLOAD_REPO_END = UPLOAD_REPO_START + 1
INSTALL_PKG_START = 40
INSTALL_PKG_END = INSTALL_PKG_START + 1
CLONE_STACK_START = 50
CLONE_STACK_END = CLONE_STACK_START + 1
PATCH_STACK_START = 60
PATCH_STACK_END = PATCH_STACK_START + 1
UPLOAD_EXTRAS_START = 70
UPLOAD_EXTRAS_END = UPLOAD_EXTRAS_START + 1
CREATE_LOCAL_START = 80
CREATE_LOCAL_END = CREATE_LOCAL_START + 1
STACK_SH_START = 100
STACK_SH_END = STACK_SH_START + 1
|
74af1019c7a21b69586ee55af2fa4ded6fe2eb03
|
refmanage/__init__.py
|
refmanage/__init__.py
|
# -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
from fs_utils import *
|
# -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
import fs_utils
|
Fix import into refmanage namespace
|
Fix import into refmanage namespace
|
Python
|
mit
|
jrsmith3/refmanage
|
# -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
from fs_utils import *
Fix import into refmanage namespace
|
# -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
import fs_utils
|
<commit_before># -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
from fs_utils import *
<commit_msg>Fix import into refmanage namespace<commit_after>
|
# -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
import fs_utils
|
# -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
from fs_utils import *
Fix import into refmanage namespace# -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
import fs_utils
|
<commit_before># -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
from fs_utils import *
<commit_msg>Fix import into refmanage namespace<commit_after># -*- coding: utf-8 -*-
"""
===============================
Base Library (:mod:`refmanage`)
===============================
.. currentmodule:: refmanage
"""
from version import __version__
import fs_utils
|
9b8d18d52ef6ddd5009a448bcaf003435b387e72
|
wake/views.py
|
wake/views.py
|
from been.couch import CouchStore
from flask import render_template
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
|
from been.couch import CouchStore
from flask import render_template, abort
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
@app.route('/<slug>')
def by_slug(slug):
events = list(store.events_by_slug(slug))
if not events:
abort(404)
return render_template('stream.html', events=events)
|
Add by_slug view for single events.
|
Add by_slug view for single events.
|
Python
|
bsd-3-clause
|
chromakode/wake
|
from been.couch import CouchStore
from flask import render_template
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
Add by_slug view for single events.
|
from been.couch import CouchStore
from flask import render_template, abort
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
@app.route('/<slug>')
def by_slug(slug):
events = list(store.events_by_slug(slug))
if not events:
abort(404)
return render_template('stream.html', events=events)
|
<commit_before>from been.couch import CouchStore
from flask import render_template
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
<commit_msg>Add by_slug view for single events.<commit_after>
|
from been.couch import CouchStore
from flask import render_template, abort
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
@app.route('/<slug>')
def by_slug(slug):
events = list(store.events_by_slug(slug))
if not events:
abort(404)
return render_template('stream.html', events=events)
|
from been.couch import CouchStore
from flask import render_template
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
Add by_slug view for single events.from been.couch import CouchStore
from flask import render_template, abort
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
@app.route('/<slug>')
def by_slug(slug):
events = list(store.events_by_slug(slug))
if not events:
abort(404)
return render_template('stream.html', events=events)
|
<commit_before>from been.couch import CouchStore
from flask import render_template
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
<commit_msg>Add by_slug view for single events.<commit_after>from been.couch import CouchStore
from flask import render_template, abort
from wake import app
store = CouchStore().load()
@app.route('/')
def wake():
return render_template('stream.html', events=store.collapsed_events())
@app.route('/<slug>')
def by_slug(slug):
events = list(store.events_by_slug(slug))
if not events:
abort(404)
return render_template('stream.html', events=events)
|
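A hypothetical check of the new route using Flask's test client; it assumes the package registers its views on import and that the CouchDB behind `store` is reachable, and the slugs are placeholders:
from wake import app

with app.test_client() as client:
    print(client.get('/no-such-slug').status_code)   # 404 when events_by_slug returns nothing
    print(client.get('/hello-world').status_code)    # 200 when the slug matches stored events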
76c9b7e8e8e6836ad73c81610a82ee2098cea026
|
tests/main/views/test_status.py
|
tests/main/views/test_status.py
|
from tests.bases import BaseApplicationTest
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
|
from tests.bases import BaseApplicationTest
from sqlalchemy.exc import SQLAlchemyError
import mock
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
@mock.patch('app.status.utils.get_db_version')
def test_catches_db_error_and_return_500(self, get_db_version):
get_db_version.side_effect = SQLAlchemyError()
status_response = self.client.get('/_status')
assert status_response.status_code == 500
|
Test coverage for SQLAlchemyError handling in status view
|
Test coverage for SQLAlchemyError handling in status view
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
from tests.bases import BaseApplicationTest
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
Test coverage for SQLAlchemyError handling in status view
|
from tests.bases import BaseApplicationTest
from sqlalchemy.exc import SQLAlchemyError
import mock
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
@mock.patch('app.status.utils.get_db_version')
def test_catches_db_error_and_return_500(self, get_db_version):
get_db_version.side_effect = SQLAlchemyError()
status_response = self.client.get('/_status')
assert status_response.status_code == 500
|
<commit_before>from tests.bases import BaseApplicationTest
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
<commit_msg>Test coverage for SQLAlchemyError handling in status view<commit_after>
|
from tests.bases import BaseApplicationTest
from sqlalchemy.exc import SQLAlchemyError
import mock
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
@mock.patch('app.status.utils.get_db_version')
def test_catches_db_error_and_return_500(self, get_db_version):
get_db_version.side_effect = SQLAlchemyError()
status_response = self.client.get('/_status')
assert status_response.status_code == 500
|
from tests.bases import BaseApplicationTest
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
Test coverage for SQLAlchemyError handling in status viewfrom tests.bases import BaseApplicationTest
from sqlalchemy.exc import SQLAlchemyError
import mock
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
@mock.patch('app.status.utils.get_db_version')
def test_catches_db_error_and_return_500(self, get_db_version):
get_db_version.side_effect = SQLAlchemyError()
status_response = self.client.get('/_status')
assert status_response.status_code == 500
|
<commit_before>from tests.bases import BaseApplicationTest
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
<commit_msg>Test coverage for SQLAlchemyError handling in status view<commit_after>from tests.bases import BaseApplicationTest
from sqlalchemy.exc import SQLAlchemyError
import mock
class TestStatus(BaseApplicationTest):
def test_should_return_200_from_elb_status_check(self):
status_response = self.client.get('/_status?ignore-dependencies')
assert status_response.status_code == 200
@mock.patch('app.status.utils.get_db_version')
def test_catches_db_error_and_return_500(self, get_db_version):
get_db_version.side_effect = SQLAlchemyError()
status_response = self.client.get('/_status')
assert status_response.status_code == 500
|
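The side_effect idiom used in the new test, reduced to a standalone example: assigning an exception instance (or class) to side_effect makes the mock raise it when called:
from unittest import mock   # the standalone `mock` package used above exposes the same API

m = mock.Mock(side_effect=RuntimeError('db down'))
try:
    m()
except RuntimeError as exc:
    print('raised:', exc)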
47044ea3f5ec426358de8a7c735da70f72a9738e
|
tests/test_compute_abundance.py
|
tests/test_compute_abundance.py
|
import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
# this behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert set(abundance.values()) == pytest.approx(set(expected_abundance.values()))
|
import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
# this behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert sorted(abundance.values()) == pytest.approx(sorted(expected_abundance.values()))
|
Update unittest to be compliant with a newer version of pytest
|
Update unittest to be compliant with a newer version of pytest
|
Python
|
agpl-3.0
|
bonsai-team/matam,ppericard/matamog,bonsai-team/matam,ppericard/matam,ppericard/matamog,ppericard/matamog,ppericard/matamog,ppericard/matam,bonsai-team/matam,ppericard/matam,bonsai-team/matam
|
import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
# this behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert set(abundance.values()) == pytest.approx(set(expected_abundance.values()))
Update unittest to be compliant with a newer version of pytest
|
import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
# this behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert sorted(abundance.values()) == pytest.approx(sorted(expected_abundance.values()))
|
<commit_before>import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
# this behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert set(abundance.values()) == pytest.approx(set(expected_abundance.values()))
<commit_msg>Update unittest to be compliant with a newer version of pytest<commit_after>
|
import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
# this behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert sorted(abundance.values()) == pytest.approx(sorted(expected_abundance.values()))
|
import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
                          # This behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert set(abundance.values()) == pytest.approx(set(expected_abundance.values()))
Update unittest to be compliant with a newer version of pytestimport os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
                          # This behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert sorted(abundance.values()) == pytest.approx(sorted(expected_abundance.values()))
|
<commit_before>import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
                          # This behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert set(abundance.values()) == pytest.approx(set(expected_abundance.values()))
<commit_msg>Update unittest to be compliant with a newer version of pytest<commit_after>import os
import sys
import tempfile
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
SCRIPTS_DIR = os.path.join(CURRENT_DIR, '..', 'scripts')
sys.path.append(SCRIPTS_DIR)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
from compute_abundance import abundance_calculation
import pytest
@pytest.mark.parametrize('blast,expected_abundance',
[ # Basic test
['scaffolds.blast',
{'scaff1': 1.75,
'scaff2': 3.75,
'scaff3': 1.25,
'scaff4': 1.25
}
],
# Test the case where a read can be found several times on the same scaffolds.
                          # This behavior is tolerated but is not intended to occur often
['scaffolds_multiple_reads.blast',
{ '159': 0.5,
'161': 1,
'175': 0.5,
'240': 3
},
]
]
)
def test_abundance_calculation(blast, expected_abundance):
blast_file = os.path.join(SAMPLE_DIR, blast)
abundance = abundance_calculation(blast_file)
assert set(abundance.keys()) == set(expected_abundance.keys())
assert sorted(abundance.values()) == pytest.approx(sorted(expected_abundance.values()))
|
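The change recorded above swaps set() for sorted() around the abundance values because newer pytest releases refuse unordered collections inside pytest.approx(): element-wise approximate comparison of a set has no well-defined pairing. A minimal sketch of the pattern, using illustrative dictionaries rather than the sample BLAST data:

import pytest

def test_approx_wants_an_ordered_sequence():
    computed = {"scaff1": 1.7500000001, "scaff2": 3.75}
    expected = {"scaff1": 1.75, "scaff2": 3.75}
    # Exact equality on the keys is still fine as sets.
    assert set(computed.keys()) == set(expected.keys())
    # Sorting gives approx() a stable, ordered sequence to compare pairwise.
    assert sorted(computed.values()) == pytest.approx(sorted(expected.values()))

Running this with pytest passes; putting set() back around the values on either side of the approx() comparison is what newer pytest versions reject.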
5514450bbc72ad9ed181a79ffc546ba8015b5fd0
|
vcs/models.py
|
vcs/models.py
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.OneToOneField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.OneToOneField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
Use a OneToMany field for the activity joiner.
|
Use a OneToMany field for the activity joiner.
|
Python
|
bsd-3-clause
|
AeroNotix/django-timetracker,AeroNotix/django-timetracker,AeroNotix/django-timetracker
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
Use a OneToMany field for the activity joiner.
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.OneToOneField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.OneToOneField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
<commit_before>from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
<commit_msg>Use a OneToMany field for the activity joiner.<commit_after>
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.OneToOneField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.OneToOneField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
Use a OneToMany field for the activity joiner.from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.OneToOneField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.OneToOneField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
<commit_before>from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
<commit_msg>Use a OneToMany field for the activity joiner.<commit_after>from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.OneToOneField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.OneToOneField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
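The diff above replaces the many-to-many joins with *-to-one fields so that self.activity resolves to a single Activity instance; with ManyToManyField it is a related manager, and self.activity.time inside time() raises AttributeError. A stripped-down sketch of the same idea on a newer Django (which additionally requires on_delete); it assumes a configured Django app, and a plain ForeignKey would be the usual choice if several entries may point at one activity:

from django.db import models

class Activity(models.Model):
    time = models.DecimalField(decimal_places=2, max_digits=10)

class ActivityEntry(models.Model):
    # A *-to-one field returns one Activity, which is what time() relies on.
    activity = models.OneToOneField(Activity, on_delete=models.CASCADE,
                                    related_name="activity_foreign")
    amount = models.BigIntegerField()

    def time(self):
        return self.activity.time * self.amount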
8c68e23dc95bd77b1ccf9e8c989caa4673620fab
|
wallace/db.py
|
wallace/db.py
|
"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
|
"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url, pool_size=100)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
|
Allow a pool size of 100
|
Allow a pool size of 100
|
Python
|
mit
|
berkeley-cocosci/Wallace,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,berkeley-cocosci/Wallace,Dallinger/Dallinger,berkeley-cocosci/Wallace,Dallinger/Dallinger,jcpeterson/Dallinger,suchow/Wallace,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,suchow/Wallace,suchow/Wallace
|
"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
Allow a pool size of 100
|
"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url, pool_size=100)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
|
<commit_before>"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
<commit_msg>Allow a pool size of 100<commit_after>
|
"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url, pool_size=100)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
|
"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
Allow a pool size of 100"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url, pool_size=100)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
|
<commit_before>"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
<commit_msg>Allow a pool size of 100<commit_after>"""Create a connection to the database."""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
import os
db_url = os.environ.get("DATABASE_URL", "postgresql://postgres@localhost/wallace")
engine = create_engine(db_url, pool_size=100)
Session = scoped_session(sessionmaker(autoflush=True, bind=engine))
Base = declarative_base()
Base.query = Session.query_property()
def get_session():
return Session
def init_db(drop_all=False):
"""Initialize the database, optionally dropping existing tables."""
if drop_all:
Base.metadata.drop_all(bind=engine)
Base.metadata.create_all(bind=engine)
return Session
|
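pool_size=100 in the record above enlarges the set of connections that SQLAlchemy's default QueuePool keeps open (the default is 5), which matters when many worker threads share one engine. A sketch of the neighbouring knobs; the URL is a placeholder, the numbers are illustrative, and create_engine() is lazy, so this runs without a live database:

from sqlalchemy import create_engine

engine = create_engine(
    "postgresql://postgres@localhost/wallace",
    pool_size=100,    # connections the pool keeps open (QueuePool default: 5)
    max_overflow=10,  # temporary extra connections allowed under load
    pool_timeout=30,  # seconds to wait for a free connection before raising
)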
c3fc313964676aec079b826fd4868fe27a27c54b
|
mollie/api/objects/capture.py
|
mollie/api/objects/capture.py
|
from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the payment for this capture."""
return self.client.settlements.get(self.settlement_id)
|
from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the settlement for this capture."""
return self.client.settlements.get(self.settlement_id)
|
Fix docstring, return the settlement
|
Fix docstring, return the settlement
|
Python
|
bsd-2-clause
|
mollie/mollie-api-python
|
from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the payment for this capture."""
return self.client.settlements.get(self.settlement_id)
Fix docstring, return the settlement
|
from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the settlement for this capture."""
return self.client.settlements.get(self.settlement_id)
|
<commit_before>from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the payment for this capture."""
return self.client.settlements.get(self.settlement_id)
<commit_msg>Fix docstring, return the settlement<commit_after>
|
from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the settlement for this capture."""
return self.client.settlements.get(self.settlement_id)
|
from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the payment for this capture."""
return self.client.settlements.get(self.settlement_id)
Fix docstring, return the settlementfrom .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the settlement for this capture."""
return self.client.settlements.get(self.settlement_id)
|
<commit_before>from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the payment for this capture."""
return self.client.settlements.get(self.settlement_id)
<commit_msg>Fix docstring, return the settlement<commit_after>from .base import Base
class Capture(Base):
@classmethod
def get_resource_class(cls, client):
from ..resources.captures import Captures
return Captures(client)
@property
def id(self):
return self._get_property('id')
@property
def mode(self):
return self._get_property('mode')
@property
def amount(self):
return self._get_property('amount')
@property
def settlement_amount(self):
return self._get_property('settlementAmount')
@property
def payment_id(self):
return self._get_property('paymentId')
@property
def shipment_id(self):
return self._get_property('shipmentId')
@property
def settlement_id(self):
return self._get_property('settlementId')
@property
def created_at(self):
return self._get_property('createdAt')
@property
def payment(self):
"""Return the payment for this capture."""
return self.client.payments.get(self.payment_id)
@property
def shipment(self):
"""Return the shipment for this capture."""
from .shipment import Shipment
url = self._get_link('shipment')
if url:
resp = self.client.orders.perform_api_call(self.client.orders.REST_READ, url)
return Shipment(resp)
@property
def settlement(self):
"""Return the settlement for this capture."""
return self.client.settlements.get(self.settlement_id)
|
baab2698f4dda3190eb62896ccbc7c174dd63113
|
mysite/deployment_settings.py
|
mysite/deployment_settings.py
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
EMAIL_SUBJECT_PREFIX='[Kaboom@OH] '
SEND_BROKEN_LINK_EMAILS=True
MANAGERS=ADMINS
SERVER_EMAIL='mr_website@linode.openhatch.org'
|
Send broken link emails out; send them from mr_website
|
Send broken link emails out; send them from mr_website
|
Python
|
agpl-3.0
|
eeshangarg/oh-mainline,SnappleCap/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,jledbetter/openhatch,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,willingc/oh-mainline,eeshangarg/oh-mainline,eeshangarg/oh-mainline,openhatch/oh-mainline,Changaco/oh-mainline,campbe13/openhatch,eeshangarg/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,sudheesh001/oh-mainline,moijes12/oh-mainline,sudheesh001/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline,SnappleCap/oh-mainline,onceuponatimeforever/oh-mainline,campbe13/openhatch,moijes12/oh-mainline,nirmeshk/oh-mainline,moijes12/oh-mainline,heeraj123/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,heeraj123/oh-mainline,vipul-sharma20/oh-mainline,ehashman/oh-mainline,onceuponatimeforever/oh-mainline,vipul-sharma20/oh-mainline,vipul-sharma20/oh-mainline,jledbetter/openhatch,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,campbe13/openhatch,Changaco/oh-mainline,ehashman/oh-mainline,moijes12/oh-mainline,SnappleCap/oh-mainline,SnappleCap/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,ojengwa/oh-mainline,mzdaniel/oh-mainline,willingc/oh-mainline,vipul-sharma20/oh-mainline,sudheesh001/oh-mainline,mzdaniel/oh-mainline,waseem18/oh-mainline,campbe13/openhatch,campbe13/openhatch,onceuponatimeforever/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,waseem18/oh-mainline,ojengwa/oh-mainline,heeraj123/oh-mainline,openhatch/oh-mainline,willingc/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,SnappleCap/oh-mainline,jledbetter/openhatch,nirmeshk/oh-mainline,nirmeshk/oh-mainline,jledbetter/openhatch,ojengwa/oh-mainline,waseem18/oh-mainline,waseem18/oh-mainline,Changaco/oh-mainline,openhatch/oh-mainline,moijes12/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,jledbetter/openhatch,mzdaniel/oh-mainline
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
Send broken link emails out; send them from mr_website
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
EMAIL_SUBJECT_PREFIX='[Kaboom@OH] '
SEND_BROKEN_LINK_EMAILS=True
MANAGERS=ADMINS
SERVER_EMAIL='mr_website@linode.openhatch.org'
|
<commit_before>from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
<commit_msg>Send broken link emails out; send them from mr_website<commit_after>
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
EMAIL_SUBJECT_PREFIX='[Kaboom@OH] '
SEND_BROKEN_LINK_EMAILS=True
MANAGERS=ADMINS
SERVER_EMAIL='mr_website@linode.openhatch.org'
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
Send broken link emails out; send them from mr_websitefrom settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
EMAIL_SUBJECT_PREFIX='[Kaboom@OH] '
SEND_BROKEN_LINK_EMAILS=True
MANAGERS=ADMINS
SERVER_EMAIL='mr_website@linode.openhatch.org'
|
<commit_before>from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
<commit_msg>Send broken link emails out; send them from mr_website<commit_after>from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@lists.openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
EMAIL_SUBJECT_PREFIX='[Kaboom@OH] '
SEND_BROKEN_LINK_EMAILS=True
MANAGERS=ADMINS
SERVER_EMAIL='mr_website@linode.openhatch.org'
|
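SEND_BROKEN_LINK_EMAILS together with MANAGERS and SERVER_EMAIL is how Django of this era mailed out 404 reports for requests that arrived with a referer. On newer Django the setting is gone and the behaviour comes from a middleware class instead; a hedged sketch of the equivalent configuration, where everything except the ADMINS entry and subject prefix taken from the record is a placeholder:

ADMINS = [("All OH devs", "devel@lists.openhatch.org")]
MANAGERS = ADMINS                        # broken-link reports go to MANAGERS
SERVER_EMAIL = "mr_website@example.org"  # From: address for these reports
EMAIL_SUBJECT_PREFIX = "[Kaboom@OH] "

MIDDLEWARE = [
    # ... the project's other middleware ...
    "django.middleware.common.BrokenLinkEmailsMiddleware",
]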
52731e9eb254b77b54f1434b44d73ecd8f9f437d
|
src/parser/banner.py
|
src/parser/banner.py
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
from bs4 import BeautifulSoup
import re
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
        # If there are images in the banner, their src may look like the following, so cleaning is necessary:
# ###file:/content/sites/skyrmions/files/Image-1.jpg?uuid=default:d1c1c1d4-7d23-45d7-b6fc-c10df12ef91e
soup = BeautifulSoup(content, 'html.parser')
images = soup.find_all('img')
for image in images:
# Cleaning image source
# FIXME: Maybe there's a better way to remove the /content/sites/<sitename> from URL...
image['src'] = re.sub(r"###file:/content/sites/[a-zA-Z0-9-\.]+|\?.+", "", image.get('src'))
self.content = str(soup)
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
self.content = content
|
Remove images URL cleaning because will be done later during parsing
|
Remove images URL cleaning because will be done later during parsing
|
Python
|
mit
|
epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp,epfl-idevelop/jahia2wp
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
from bs4 import BeautifulSoup
import re
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
        # If there are images in the banner, their src may look like the following, so cleaning is necessary:
# ###file:/content/sites/skyrmions/files/Image-1.jpg?uuid=default:d1c1c1d4-7d23-45d7-b6fc-c10df12ef91e
soup = BeautifulSoup(content, 'html.parser')
images = soup.find_all('img')
for image in images:
# Cleaning image source
# FIXME: Maybe there's a better way to remove the /content/sites/<sitename> from URL...
image['src'] = re.sub(r"###file:/content/sites/[a-zA-Z0-9-\.]+|\?.+", "", image.get('src'))
self.content = str(soup)
Remove images URL cleaning because will be done later during parsing
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
self.content = content
|
<commit_before>"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
from bs4 import BeautifulSoup
import re
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
        # If there are images in the banner, their src may look like the following, so cleaning is necessary:
# ###file:/content/sites/skyrmions/files/Image-1.jpg?uuid=default:d1c1c1d4-7d23-45d7-b6fc-c10df12ef91e
soup = BeautifulSoup(content, 'html.parser')
images = soup.find_all('img')
for image in images:
# Cleaning image source
# FIXME: Maybe there's a better way to remove the /content/sites/<sitename> from URL...
image['src'] = re.sub(r"###file:/content/sites/[a-zA-Z0-9-\.]+|\?.+", "", image.get('src'))
self.content = str(soup)
<commit_msg>Remove images URL cleaning because will be done later during parsing<commit_after>
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
self.content = content
|
"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
from bs4 import BeautifulSoup
import re
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
        # If there are images in the banner, their src may look like the following, so cleaning is necessary:
# ###file:/content/sites/skyrmions/files/Image-1.jpg?uuid=default:d1c1c1d4-7d23-45d7-b6fc-c10df12ef91e
soup = BeautifulSoup(content, 'html.parser')
images = soup.find_all('img')
for image in images:
# Cleaning image source
# FIXME: Maybe there's a better way to remove the /content/sites/<sitename> from URL...
image['src'] = re.sub(r"###file:/content/sites/[a-zA-Z0-9-\.]+|\?.+", "", image.get('src'))
self.content = str(soup)
Remove images URL cleaning because will be done later during parsing"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
self.content = content
|
<commit_before>"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
from bs4 import BeautifulSoup
import re
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
# If there are image in banner, they may have following code aspect, so cleaning is necessary :
# ###file:/content/sites/skyrmions/files/Image-1.jpg?uuid=default:d1c1c1d4-7d23-45d7-b6fc-c10df12ef91e
soup = BeautifulSoup(content, 'html.parser')
images = soup.find_all('img')
for image in images:
# Cleaning image source
# FIXME: Maybe there's a better way to remove the /content/sites/<sitename> from URL...
image['src'] = re.sub(r"###file:/content/sites/[a-zA-Z0-9-\.]+|\?.+", "", image.get('src'))
self.content = str(soup)
<commit_msg>Remove images URL cleaning because will be done later during parsing<commit_after>"""(c) All rights reserved. ECOLE POLYTECHNIQUE FEDERALE DE LAUSANNE, Switzerland, VPSI, 2018"""
class Banner:
""" To store website banner information. """
# FIXME: extend class with more information if html content is not enough to handle banner
def __init__(self, content):
""" Constructor
content - HTML content of the banner """
self.content = content
|
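The block removed in the record above stripped the Jahia-style ###file:/content/sites/<site> prefix and any query string from image src attributes; the commit merely defers that cleanup to a later parsing step. For reference, the same cleanup as a standalone helper, with illustrative sample markup (requires beautifulsoup4):

import re
from bs4 import BeautifulSoup

def clean_image_sources(content):
    soup = BeautifulSoup(content, "html.parser")
    for image in soup.find_all("img"):
        # Drop the ###file:/content/sites/<site> prefix and the ?uuid=... query string.
        image["src"] = re.sub(r"###file:/content/sites/[a-zA-Z0-9-\.]+|\?.+", "",
                              image.get("src"))
    return str(soup)

html = '<img src="###file:/content/sites/skyrmions/files/Image-1.jpg?uuid=default:abc">'
print(clean_image_sources(html))  # -> <img src="/files/Image-1.jpg"/>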
515e3b4da9d8c793c57e8cb8deeda93e42aa3871
|
nereid/ctx.py
|
nereid/ctx.py
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ):
super(RequestContext, self).__init__(app, environ)
self.transaction = None
self.cache = app.cache
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ, request=None):
super(RequestContext, self).__init__(app, environ, request)
self.transaction = None
self.cache = app.cache
|
Add request argument for RequestContext
|
Add request argument for RequestContext
See: cb2055bbcb345e367b6bdfe177a407546286695c@097353695e3178a38403b204ae4889c8a32bf997
|
Python
|
bsd-3-clause
|
riteshshrv/nereid,fulfilio/nereid,riteshshrv/nereid,fulfilio/nereid,usudaysingh/nereid,prakashpp/nereid,usudaysingh/nereid,prakashpp/nereid
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ):
super(RequestContext, self).__init__(app, environ)
self.transaction = None
self.cache = app.cache
Add request argument for RequestContext
See: cb2055bbcb345e367b6bdfe177a407546286695c@097353695e3178a38403b204ae4889c8a32bf997
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ, request=None):
super(RequestContext, self).__init__(app, environ, request)
self.transaction = None
self.cache = app.cache
|
<commit_before>#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ):
super(RequestContext, self).__init__(app, environ)
self.transaction = None
self.cache = app.cache
<commit_msg>Add request argument for RequestContext
See: cb2055bbcb345e367b6bdfe177a407546286695c@097353695e3178a38403b204ae4889c8a32bf997<commit_after>
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ, request=None):
super(RequestContext, self).__init__(app, environ, request)
self.transaction = None
self.cache = app.cache
|
#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ):
super(RequestContext, self).__init__(app, environ)
self.transaction = None
self.cache = app.cache
Add request argument for RequestContext
See: cb2055bbcb345e367b6bdfe177a407546286695c@097353695e3178a38403b204ae4889c8a32bf997#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ, request=None):
super(RequestContext, self).__init__(app, environ, request)
self.transaction = None
self.cache = app.cache
|
<commit_before>#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ):
super(RequestContext, self).__init__(app, environ)
self.transaction = None
self.cache = app.cache
<commit_msg>Add request argument for RequestContext
See: cb2055bbcb345e367b6bdfe177a407546286695c@097353695e3178a38403b204ae4889c8a32bf997<commit_after>#This file is part of Tryton & Nereid. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
from flask.ctx import RequestContext as RequestContextBase
from flask.ctx import has_request_context # noqa
class RequestContext(RequestContextBase):
"""
The request context contains all request relevant information. It is
created at the beginning of the request and pushed to the
`_request_ctx_stack` and removed at the end of it. It will create the
URL adapter and request object for the WSGI environment provided.
"""
def __init__(self, app, environ, request=None):
super(RequestContext, self).__init__(app, environ, request)
self.transaction = None
self.cache = app.cache
|
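The signature change above tracks Flask itself, whose RequestContext gained an optional request parameter so callers can hand in a pre-built request object instead of having one rebuilt from the WSGI environ. A minimal sketch of the pattern for a Flask version that already accepts that argument; the transaction and cache attributes stand in for whatever per-request state the application keeps:

from flask import Flask, Request
from flask.ctx import RequestContext as RequestContextBase
from werkzeug.test import create_environ

class RequestContext(RequestContextBase):
    def __init__(self, app, environ, request=None):
        # Forward the optional request so Flask does not rebuild it from environ.
        super(RequestContext, self).__init__(app, environ, request)
        self.transaction = None
        self.cache = getattr(app, "cache", None)

app = Flask(__name__)
environ = create_environ("/", "http://localhost/")
ctx = RequestContext(app, environ)                                 # request built from environ
ctx_with_request = RequestContext(app, environ, Request(environ))  # pre-built request passed in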
d5bca737d19f7bfd34fd37d00f1210f8bc777c76
|
crmapp/accounts/views.py
|
crmapp/accounts/views.py
|
from django.shortcuts import render
# Create your views here.
|
from django.views.generic import ListView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from .models import Account
class AccountList(ListView):
model = Account
template_name = 'accounts/account_list.html'
context_object_name = 'accounts'
def get_queryset(self):
account_list = Account.objects.filter(owner=self.request.user)
return account_list
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(AccountList, self).dispatch(*args, **kwargs)
|
Create the Account List > List Accounts - Create View
|
Create the Account List > List Accounts - Create View
|
Python
|
mit
|
tabdon/crmeasyapp,tabdon/crmeasyapp,deenaariff/Django
|
from django.shortcuts import render
# Create your views here.
Create the Account List > List Accounts - Create View
|
from django.views.generic import ListView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from .models import Account
class AccountList(ListView):
model = Account
template_name = 'accounts/account_list.html'
context_object_name = 'accounts'
def get_queryset(self):
account_list = Account.objects.filter(owner=self.request.user)
return account_list
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(AccountList, self).dispatch(*args, **kwargs)
|
<commit_before>from django.shortcuts import render
# Create your views here.
<commit_msg>Create the Account List > List Accounts - Create View<commit_after>
|
from django.views.generic import ListView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from .models import Account
class AccountList(ListView):
model = Account
template_name = 'accounts/account_list.html'
context_object_name = 'accounts'
def get_queryset(self):
account_list = Account.objects.filter(owner=self.request.user)
return account_list
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(AccountList, self).dispatch(*args, **kwargs)
|
from django.shortcuts import render
# Create your views here.
Create the Account List > List Accounts - Create Viewfrom django.views.generic import ListView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from .models import Account
class AccountList(ListView):
model = Account
template_name = 'accounts/account_list.html'
context_object_name = 'accounts'
def get_queryset(self):
account_list = Account.objects.filter(owner=self.request.user)
return account_list
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(AccountList, self).dispatch(*args, **kwargs)
|
<commit_before>from django.shortcuts import render
# Create your views here.
<commit_msg>Create the Account List > List Accounts - Create View<commit_after>from django.views.generic import ListView
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from .models import Account
class AccountList(ListView):
model = Account
template_name = 'accounts/account_list.html'
context_object_name = 'accounts'
def get_queryset(self):
account_list = Account.objects.filter(owner=self.request.user)
return account_list
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(AccountList, self).dispatch(*args, **kwargs)
|
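Wrapping dispatch() in method_decorator(login_required), as in the record above, is the classic way to gate a class-based view; newer Django ships LoginRequiredMixin for the same purpose. A sketch of the equivalent view, assuming a configured project whose Account model carries an owner foreign key as in the record:

from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView

from .models import Account

class AccountList(LoginRequiredMixin, ListView):
    model = Account
    template_name = "accounts/account_list.html"
    context_object_name = "accounts"

    def get_queryset(self):
        # Only list the accounts owned by the signed-in user.
        return Account.objects.filter(owner=self.request.user)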