commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
43338ffa4d9703585c78f6613ed1bd047c076748
|
setup.py
|
setup.py
|
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(name='tile-stitcher',
version='0.0.1',
description=u"Stitch image tiles into composite TIFs",
long_description=long_description,
classifiers=[],
keywords='',
author=u"Damon Burgett",
author_email='damon@mapbox.com',
url='https://github.com/mapbox/tile-stitcher',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'click'
],
extras_require={
'test': ['pytest'],
},
entry_points="""
[console_scripts]
tile-stitch=tile_stitcher.scripts.cli:cli
"""
)
|
from codecs import open as codecs_open
from setuptools import setup, find_packages
# Get the long description from the relevant file
with codecs_open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(name='tile-stitcher',
version='0.0.1',
description=u"Stitch image tiles into composite TIFs",
long_description=long_description,
classifiers=[],
keywords='',
author=u"Damon Burgett",
author_email='damon@mapbox.com',
url='https://github.com/mapbox/tile-stitcher',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'click'
],
extras_require={
'test': ['pytest', 'pytest-cov'],
},
entry_points="""
[console_scripts]
tile-stitch=tile_stitcher.scripts.cli:cli
"""
)
|
Add pytest-cov to test extras
|
Add pytest-cov to test extras
|
Python
|
mit
|
mapbox/untiler
|
---
+++
@@ -24,7 +24,7 @@
'click'
],
extras_require={
- 'test': ['pytest'],
+ 'test': ['pytest', 'pytest-cov'],
},
entry_points="""
[console_scripts]
|
f97ea075866cf67e873c072613e058be160d5340
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='django-scheduler',
version='0.7.1',
description='A calendaring app for Django.',
author='Leonardo Lazzaro',
author_email='lazzaroleonardo@gmail.com',
url='https://github.com/llazzaro/django-scheduler',
packages=[
'schedule',
'schedule.conf',
'schedule.feeds',
'schedule.management',
'schedule.management.commands',
'schedule.models',
'schedule.migrations',
'schedule.templatetags',
'schedule.tests',
],
include_package_data=True,
zip_safe=False,
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=[
'Django>=1.5',
'argparse==1.1',
'python-dateutil>=2.1',
'pytz>=2013.9',
'six>=1.3.0',
'vobject>=0.8.1c',
'South==0.8.4',
'django-annoying==0.7.9',
'coverage==3.6',
],
license='BSD',
test_suite="schedule.tests",
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='django-scheduler',
version='0.7.2',
description='A calendaring app for Django.',
author='Leonardo Lazzaro',
author_email='lazzaroleonardo@gmail.com',
url='https://github.com/llazzaro/django-scheduler',
packages=[
'schedule',
'schedule.conf',
'schedule.feeds',
'schedule.management',
'schedule.management.commands',
'schedule.models',
'schedule.migrations',
'schedule.templatetags',
'schedule.tests',
],
include_package_data=True,
zip_safe=False,
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities'],
install_requires=[
'Django>=1.5',
'argparse==1.1',
'python-dateutil>=2.1',
'pytz>=2013.9',
'six>=1.3.0',
'vobject>=0.8.1c',
'South==0.8.4',
'django-annoying==0.7.9',
'coverage==3.6',
],
license='BSD',
test_suite="schedule.tests",
)
|
Update scheduler version for fixing a broken pypi build
|
Update scheduler version for fixing a broken pypi build
|
Python
|
bsd-3-clause
|
GrahamDigital/django-scheduler,GrahamDigital/django-scheduler,rowbot-dev/django-scheduler,sprightco/django-scheduler,nharsch/django-scheduler,nwaxiomatic/django-scheduler,drodger/django-scheduler,drodger/django-scheduler,jrutila/django-scheduler,nharsch/django-scheduler,GrahamDigital/django-scheduler,jrutila/django-scheduler,llazzaro/django-scheduler,sprightco/django-scheduler,mbrondani/django-scheduler,llazzaro/django-scheduler,drodger/django-scheduler,Gustavosdo/django-scheduler,sprightco/django-scheduler,llazzaro/django-scheduler,erezlife/django-scheduler,Gustavosdo/django-scheduler,nwaxiomatic/django-scheduler,mbrondani/django-scheduler,nwaxiomatic/django-scheduler,erezlife/django-scheduler,rowbot-dev/django-scheduler
|
---
+++
@@ -5,7 +5,7 @@
setup(
name='django-scheduler',
- version='0.7.1',
+ version='0.7.2',
description='A calendaring app for Django.',
author='Leonardo Lazzaro',
author_email='lazzaroleonardo@gmail.com',
|
21febe61699787a0a47bd0ee2ba5e58727d75c17
|
setup.py
|
setup.py
|
"""Mailmerge build and install configuration."""
from pathlib import Path
import setuptools
# Read the contents of README file
PROJECT_DIR = Path(__file__).parent
README = PROJECT_DIR/"README.md"
LONG_DESCRIPTION = README.open().read()
setuptools.setup(
name="mailmerge",
description="A simple, command line mail merge tool",
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
version="2.2.0",
author="Andrew DeOrio",
author_email="awdeorio@umich.edu",
url="https://github.com/awdeorio/mailmerge/",
license="MIT",
packages=["mailmerge"],
keywords=["mail merge", "mailmerge", "email"],
install_requires=[
"click",
"jinja2",
"markdown",
"html5"
],
extras_require={
"dev": [
"pdbpp",
"twine",
"tox",
],
"test": [
"check-manifest",
"codecov>=1.4.0",
"freezegun",
"pycodestyle",
"pydocstyle",
"pylint",
"pytest",
"pytest-cov",
"pytest-mock",
"sh",
],
},
python_requires='>=3.6',
entry_points={
"console_scripts": [
"mailmerge = mailmerge.__main__:main",
]
},
)
|
"""Mailmerge build and install configuration."""
from pathlib import Path
import setuptools
# Read the contents of README file
PROJECT_DIR = Path(__file__).parent
README = PROJECT_DIR/"README.md"
LONG_DESCRIPTION = README.open().read()
setuptools.setup(
name="mailmerge",
description="A simple, command line mail merge tool",
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
version="2.2.0",
author="Andrew DeOrio",
author_email="awdeorio@umich.edu",
url="https://github.com/awdeorio/mailmerge/",
license="MIT",
packages=["mailmerge"],
keywords=["mail merge", "mailmerge", "email"],
install_requires=[
"click",
"jinja2",
"markdown",
"html5"
],
extras_require={
"dev": [
"pdbpp",
"twine",
"tox",
],
"test": [
"check-manifest",
"freezegun",
"pycodestyle",
"pydocstyle",
"pylint",
"pytest",
"pytest-cov",
"pytest-mock",
"sh",
],
},
python_requires='>=3.6',
entry_points={
"console_scripts": [
"mailmerge = mailmerge.__main__:main",
]
},
)
|
Remove codecov dependency because it's now a github action
|
Remove codecov dependency because it's now a github action
|
Python
|
mit
|
awdeorio/mailmerge
|
---
+++
@@ -35,7 +35,6 @@
],
"test": [
"check-manifest",
- "codecov>=1.4.0",
"freezegun",
"pycodestyle",
"pydocstyle",
|
2150f71c723d85d7ba2c5756f90c0174b3dc6666
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='Flask-RESTful',
version='0.2.5',
url='https://www.github.com/twilio/flask-restful/',
author='Kyle Conroy',
author_email='help@twilio.com',
description='Simple framework for creating REST APIs',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
platforms='any',
test_suite = 'nose.collector',
#setup_requires=[
# 'nose==1.3.1',
# 'mock==1.0.1',
# 'six==1.5.2',
# 'blinker==1.3',
#],
install_requires=[
'Flask==0.12.3',
],
# Install these with "pip install -e '.[paging]'" or '.[docs]'
extras_require={
'paging': 'pycrypto>=2.6',
'docs': 'sphinx',
}
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='Flask-RESTful',
version='0.2.5',
url='https://www.github.com/twilio/flask-restful/',
author='Kyle Conroy',
author_email='help@twilio.com',
description='Simple framework for creating REST APIs',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
platforms='any',
test_suite = 'nose.collector',
#setup_requires=[
# 'nose==1.3.1',
# 'mock==1.0.1',
# 'six==1.5.2',
# 'blinker==1.3',
#],
install_requires=[
'Flask==1.0',
],
# Install these with "pip install -e '.[paging]'" or '.[docs]'
extras_require={
'paging': 'pycrypto>=2.6',
'docs': 'sphinx',
}
)
|
Fix for chaos security warning CVE-2019-1010083
|
Fix for chaos security warning CVE-2019-1010083
|
Python
|
bsd-3-clause
|
CanalTP/flask-restful
|
---
+++
@@ -21,7 +21,7 @@
# 'blinker==1.3',
#],
install_requires=[
- 'Flask==0.12.3',
+ 'Flask==1.0',
],
# Install these with "pip install -e '.[paging]'" or '.[docs]'
extras_require={
|
40f40d442964d4eb5a03bbd7580959ecfc530f18
|
setup.py
|
setup.py
|
import setuptools
from distutils.core import setup
setup(
name='pydeps',
version='0.9.2',
packages=['pydeps'],
install_requires=[
'enum34'
],
entry_points={
'console_scripts': [
'py2dep = pydeps.py2depgraph:py2depgraph',
'dep2dot = pydeps.depgraph2dot:depgraph2dot',
'pydeps = pydeps.pydeps:pydeps',
]
},
url='https://github.com/thebjorn/pydeps',
license='BSD',
author='bjorn',
author_email='bp@datakortet.no',
description='Display module dependencies'
)
|
import setuptools
from setuptools import setup
#from distutils.core import setup
setup(
name='pydeps',
version='0.9.2',
packages=['pydeps'],
install_requires=[
'enum34'
],
entry_points={
'console_scripts': [
#'py2dep = pydeps.py2depgraph:py2depgraph',
#'dep2dot = pydeps.depgraph2dot:depgraph2dot',
'pydeps = pydeps.pydeps:pydeps',
]
},
url='https://github.com/thebjorn/pydeps',
license='BSD',
author='bjorn',
author_email='bp@datakortet.no',
description='Display module dependencies'
)
|
Drop distutils and alternate entry points.
|
Drop distutils and alternate entry points.
|
Python
|
bsd-2-clause
|
thebjorn/pydeps,thebjorn/pydeps
|
---
+++
@@ -1,6 +1,7 @@
import setuptools
-from distutils.core import setup
+from setuptools import setup
+#from distutils.core import setup
setup(
name='pydeps',
@@ -11,8 +12,8 @@
],
entry_points={
'console_scripts': [
- 'py2dep = pydeps.py2depgraph:py2depgraph',
- 'dep2dot = pydeps.depgraph2dot:depgraph2dot',
+ #'py2dep = pydeps.py2depgraph:py2depgraph',
+ #'dep2dot = pydeps.depgraph2dot:depgraph2dot',
'pydeps = pydeps.pydeps:pydeps',
]
},
|
9da7843cc9add30af30186aa83ebd157cfb5a1c0
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from ansible_vault_diff import __version__ as version
config = {
'description': 'Tool used to display a diff of two ansible-vault encrypted repos',
'author': 'Brandon Myers',
'url': 'https://github.com/pwnbus/ansible-vault-diff',
'download_url': 'https://github.com/pwnbus/ansible-vault-diff/archive/master.zip',
'author_email': 'pwnbus@mozilla.com',
'version': version,
'install_requires': [
'six',
],
'packages': ['ansible_vault_diff', 'bin'],
'scripts': [],
'name': 'ansible_vault_diff'
}
setup(**config)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from ansible_vault_diff import __version__ as version
config = {
'description': 'Tool used to display a diff of two ansible-vault encrypted repos',
'author': 'Brandon Myers',
'url': 'https://github.com/pwnbus/ansible-vault-diff',
'download_url': 'https://github.com/pwnbus/ansible-vault-diff/archive/master.zip',
'author_email': 'pwnbus@mozilla.com',
'version': version,
'install_requires': [
'six',
'ansible'
],
'packages': ['ansible_vault_diff', 'bin'],
'scripts': [],
'name': 'ansible_vault_diff'
}
setup(**config)
|
Add ansible as a dependency
|
Add ansible as a dependency
|
Python
|
mit
|
pwnbus/ansible-vault-diff
|
---
+++
@@ -16,6 +16,7 @@
'version': version,
'install_requires': [
'six',
+ 'ansible'
],
'packages': ['ansible_vault_diff', 'bin'],
'scripts': [],
|
8fa28f4d78c4c2ffb23c8088a4983a77a41cd805
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='pyscores',
version='0.2',
description='Football (soccer) scores in your command line',
url='https://github.com/conormag94/pyscores',
author='Conor Maguire',
author_email='conormag94@gmail.com',
license='MIT',
packages=find_packages(),
entry_points={
'console_scripts': [
'scores=pyscores.__main__:main'
]
},
install_requires=[
'click==5.1',
'requests==2.8.1',
'tabulate==0.7.5',
'termcolor==1.1.0'
])
|
from setuptools import setup, find_packages
setup(name='pyscores',
version='0.2',
description='Football (soccer) scores in your command line',
url='https://github.com/conormag94/pyscores',
author='Conor Maguire',
author_email='conormag94@gmail.com',
license='MIT',
packages=find_packages(),
entry_points={
'console_scripts': [
'scores=pyscores.cli:main'
]
},
install_requires=[
'click==5.1',
'requests==2.8.1',
'tabulate==0.7.5',
'termcolor==1.1.0'
])
|
Rename console entry point from __main__ to cli
|
Rename console entry point from __main__ to cli
|
Python
|
mit
|
conormag94/pyscores
|
---
+++
@@ -10,7 +10,7 @@
packages=find_packages(),
entry_points={
'console_scripts': [
- 'scores=pyscores.__main__:main'
+ 'scores=pyscores.cli:main'
]
},
install_requires=[
|
62288de60f1407fa03dfa3e4605a8383b580f881
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tangled.mako',
version='1.0a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=0.1a10',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled.web[dev]>=0.1a10',
'tangled[dev]>=1.0a11',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
from setuptools import setup
setup(
name='tangled.mako',
version='1.0a4.dev0',
description='Tangled Mako integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.mako/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
include_package_data=True,
packages=[
'tangled',
'tangled.mako',
'tangled.mako.tests',
],
install_requires=[
'tangled.web>=1.0a12',
'Mako>=1.0',
],
extras_require={
'dev': [
'tangled[dev]>=1.0a11',
'tangled.web[dev]>=1.0a12',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
Upgrade tangled.web 0.1a10 => 1.0a12
|
Upgrade tangled.web 0.1a10 => 1.0a12
|
Python
|
mit
|
TangledWeb/tangled.mako
|
---
+++
@@ -17,13 +17,13 @@
'tangled.mako.tests',
],
install_requires=[
- 'tangled.web>=0.1a10',
+ 'tangled.web>=1.0a12',
'Mako>=1.0',
],
extras_require={
'dev': [
- 'tangled.web[dev]>=0.1a10',
'tangled[dev]>=1.0a11',
+ 'tangled.web[dev]>=1.0a12',
],
},
classifiers=[
|
a50bc1908661daf0014ab7068488abbe1245d368
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from distutils.core import setup
from setuptools import find_packages
from pip.req import parse_requirements
os.chdir('OIPA')
install_requirements = parse_requirements('requirements.txt', session=False)
requirements = [str(ir.req) for ir in install_requirements]
setup(name='OIPA',
version='2.1',
description='',
author='Zimmerman & Zimmerman',
url="OIPA is an open-source framework that renders IATI compliant XML and \
related indicator #opendata into the OIPA datamodel for storage. \
This ETL approach provides I/O using the OIPA Tastypie RESTless API (soon DRF!) \
providing you with direct XML or JSON output. Does Django and MySQL. \
Codebase maintained by Zimmerman & Zimmerman in Amsterdam. http://www.oipa.nl/",
packages=find_packages(),
install_requires=requirements,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_requirements = parse_requirements('OIPA/requirements.txt', session=False)
requirements = [str(ir.req) for ir in install_requirements]
setup(name='OIPA',
version='2.1.3',
author='Zimmerman & Zimmerman',
description="OIPA is an open-source framework that renders IATI compliant XML and \
related indicator #opendata into the OIPA datamodel for storage. \
This ETL approach provides I/O using the OIPA Tastypie RESTless API (soon DRF!) \
providing you with direct XML or JSON output. Does Django and MySQL. \
Codebase maintained by Zimmerman & Zimmerman in Amsterdam. http://www.oipa.nl/",
url='https://github.com/catalpainternational/oipa',
packages=find_packages('OIPA'), # iati, etc
package_dir={'': 'OIPA'},
install_requires=requirements,
zip_safe=False
)
|
Fix packaging to allow non-editable installation
|
Fix packaging to allow non-editable installation
|
Python
|
agpl-3.0
|
catalpainternational/OIPA,catalpainternational/OIPA,catalpainternational/OIPA,catalpainternational/OIPA
|
---
+++
@@ -1,23 +1,22 @@
#!/usr/bin/env python
-import os
-from distutils.core import setup
-from setuptools import find_packages
+from setuptools import setup, find_packages
from pip.req import parse_requirements
-os.chdir('OIPA')
-install_requirements = parse_requirements('requirements.txt', session=False)
+install_requirements = parse_requirements('OIPA/requirements.txt', session=False)
requirements = [str(ir.req) for ir in install_requirements]
setup(name='OIPA',
- version='2.1',
- description='',
+ version='2.1.3',
author='Zimmerman & Zimmerman',
- url="OIPA is an open-source framework that renders IATI compliant XML and \
+ description="OIPA is an open-source framework that renders IATI compliant XML and \
related indicator #opendata into the OIPA datamodel for storage. \
This ETL approach provides I/O using the OIPA Tastypie RESTless API (soon DRF!) \
providing you with direct XML or JSON output. Does Django and MySQL. \
Codebase maintained by Zimmerman & Zimmerman in Amsterdam. http://www.oipa.nl/",
- packages=find_packages(),
+ url='https://github.com/catalpainternational/oipa',
+ packages=find_packages('OIPA'), # iati, etc
+ package_dir={'': 'OIPA'},
install_requires=requirements,
- )
+ zip_safe=False
+ )
|
e44821e50b4d8eb1aff8906703696ae2ae7fb889
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-fsm-log',
version='1.2.1',
description='Logging for django-fsm',
author='Gizmag',
author_email='tech@gizmag.com',
url='https://github.com/gizmag/django-fsm-log',
packages=find_packages(),
install_requires=['django>=1.6', 'django_fsm>=2', 'django_appconf'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.6',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='django-fsm-log',
version='1.2.1',
description='Logging for django-fsm',
author='Gizmag',
author_email='tech@gizmag.com',
url='https://github.com/gizmag/django-fsm-log',
packages=find_packages(),
install_requires=['django>=1.6', 'django_fsm>=2', 'django_appconf'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.6',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Add Django 1.9 to classifiers
|
Add Django 1.9 to classifiers
|
Python
|
mit
|
gizmag/django-fsm-log,ticosax/django-fsm-log
|
---
+++
@@ -18,6 +18,7 @@
'Framework :: Django :: 1.6',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
+ 'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
|
9d248a665c3dd17688e16253c484f9e05de4f1cb
|
setup.py
|
setup.py
|
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='mdot-rest',
version='0.1',
packages=['mdot_rest'],
include_package_data=True,
install_requires=[
'setuptools',
'django<1.9rc1',
'djangorestframework',
'django-filter',
'Pillow',
'mock==1.0.1',
],
license='Apache License, Version 2.0',
description='A RESTful API server for references to mobile resources.',
long_description=README,
url='',
author='Craig M. Stimmel',
author_email='cstimmel@uw.edu',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='mdot-rest',
version='0.1',
packages=['mdot_rest'],
include_package_data=True,
install_requires=[
'setuptools',
'django<1.9',
'djangorestframework',
'django-filter',
'Pillow',
'mock==1.0.1',
],
license='Apache License, Version 2.0',
description='A RESTful API server for references to mobile resources.',
long_description=README,
url='',
author='Craig M. Stimmel',
author_email='cstimmel@uw.edu',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
Revert "<1.9 gets you 1.9rc1 >_>"
|
Revert "<1.9 gets you 1.9rc1 >_>"
This reverts commit 22c05944a233e245f7f013e4aa872c938f8a663f.
|
Python
|
apache-2.0
|
uw-it-aca/mdot-rest,uw-it-aca/mdot-rest
|
---
+++
@@ -13,7 +13,7 @@
include_package_data=True,
install_requires=[
'setuptools',
- 'django<1.9rc1',
+ 'django<1.9',
'djangorestframework',
'django-filter',
'Pillow',
|
f71b166523ee3772d2ff931f9e2a893303654cb4
|
IPython/core/tests/test_imports.py
|
IPython/core/tests/test_imports.py
|
# encoding: utf-8
def test_import_completer():
from IPython.core import completer
def test_import_crashhandler():
from IPython.core import crashhandler
def test_import_debugger():
from IPython.core import debugger
def test_import_fakemodule():
from IPython.core import fakemodule
def test_import_excolors():
from IPython.core import excolors
def test_import_history():
from IPython.core import history
def test_import_hooks():
from IPython.core import hooks
def test_import_getipython():
from IPython.core import getipython
def test_import_interactiveshell():
from IPython.core import interactiveshell
def test_import_logger():
from IPython.core import logger
def test_import_macro():
from IPython.core import macro
def test_import_magic():
from IPython.core import magic
def test_import_oinspect():
from IPython.core import oinspect
def test_import_prefilter():
from IPython.core import prefilter
def test_import_prompts():
from IPython.core import prompts
def test_import_release():
from IPython.core import release
def test_import_shadowns():
from IPython.core import shadowns
def test_import_ultratb():
from IPython.core import ultratb
def test_import_usage():
from IPython.core import usage
|
# encoding: utf-8
def test_import_completer():
from IPython.core import completer
def test_import_crashhandler():
from IPython.core import crashhandler
def test_import_debugger():
from IPython.core import debugger
def test_import_excolors():
from IPython.core import excolors
def test_import_history():
from IPython.core import history
def test_import_hooks():
from IPython.core import hooks
def test_import_getipython():
from IPython.core import getipython
def test_import_interactiveshell():
from IPython.core import interactiveshell
def test_import_logger():
from IPython.core import logger
def test_import_macro():
from IPython.core import macro
def test_import_magic():
from IPython.core import magic
def test_import_oinspect():
from IPython.core import oinspect
def test_import_prefilter():
from IPython.core import prefilter
def test_import_prompts():
from IPython.core import prompts
def test_import_release():
from IPython.core import release
def test_import_shadowns():
from IPython.core import shadowns
def test_import_ultratb():
from IPython.core import ultratb
def test_import_usage():
from IPython.core import usage
|
Drop test for importing fakemodule
|
Drop test for importing fakemodule
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
---
+++
@@ -8,9 +8,6 @@
def test_import_debugger():
from IPython.core import debugger
-
-def test_import_fakemodule():
- from IPython.core import fakemodule
def test_import_excolors():
from IPython.core import excolors
|
902b2b0929dad116664d37a13ff325a10b67db7b
|
catalog/queue/sqs.py
|
catalog/queue/sqs.py
|
from Queue import Queue, Empty
import json
from .base import BaseQueue
sqs = None
def do_delayed_imports():
global sqs
from boto import sqs
class SQSQueue(BaseQueue):
_cache = Queue()
def __init__(self):
BaseQueue.__init__(self)
do_delayed_imports()
self.conn = sqs.connect_to_region('us-west-2')
self.unprocessed = self.conn.create_queue('structured-catalog-unprocessed')
def push(self, job):
m = sqs.message.Message()
m.set_body(json.dumps(job))
self.unprocessed.write(m)
def get(self):
try:
msg = self._cache.get(block=False)
self.remove(msg)
return json.loads(msg.get_body())
except Empty:
rs = self.unprocessed.get_messages(num_messages=10)
if not rs:
return
for msg in rs:
self._cache.put(msg)
return self.get()
def remove(self, msg):
self.unprocessed.delete_message(msg)
|
from multiprocessing import Queue
from Queue import Empty
import json
from .base import BaseQueue
sqs = None
def do_delayed_imports():
global sqs
from boto import sqs
class SQSQueue(BaseQueue):
_cache = Queue()
def __init__(self):
BaseQueue.__init__(self)
do_delayed_imports()
self.conn = sqs.connect_to_region('us-west-2')
self.unprocessed = self.conn.create_queue('structured-catalog-unprocessed')
def push(self, job):
m = sqs.message.Message()
m.set_body(json.dumps(job))
self.unprocessed.write(m)
def get(self):
try:
msg = self._cache.get(block=False)
self.remove(msg)
return json.loads(msg.get_body())
except Empty:
rs = self.unprocessed.get_messages(num_messages=10)
if not rs:
return
for msg in rs:
self._cache.put(msg)
return self.get()
def remove(self, msg):
self.unprocessed.delete_message(msg)
|
Use queue from multiprocessing library instead of Queue
|
Use queue from multiprocessing library instead of Queue
|
Python
|
mpl-2.0
|
mozilla/structured-catalog
|
---
+++
@@ -1,4 +1,5 @@
-from Queue import Queue, Empty
+from multiprocessing import Queue
+from Queue import Empty
import json
from .base import BaseQueue
|
37cb8d17871045b47af684e856e216d628c964f4
|
src/mmw/apps/modeling/migrations/0024_fix_gwlfe_gis_data.py
|
src/mmw/apps/modeling/migrations/0024_fix_gwlfe_gis_data.py
|
# -*- coding: utf-8 -*-
from django.db import migrations
def fix_gis_data_serialization(apps, schema_editor):
"""
Release 1.20.0 introduced a change which let the project "gis_data"
field get updated by modifications on a scenario. This effectively
meant that modifications were being applied to all scenarios and that
removing them did not actually remove their effect from the gwlf-e
input. For projects that were created and suffered from that bug,
clearing out the gis_data on Project and the results on Scenario
will force them to be recomputed with the fix applied.
"""
Project = apps.get_model('modeling', 'Project')
bug_released_date = '2017-10-17'
# Apply fix to Multi-Year projects created after the release
for project in Project.objects.filter(created_at__gte=bug_released_date,
model_package='gwlfe'):
project.gis_data = None
for scenario in project.scenarios.all():
scenario.results = None
scenario.save()
project.save()
class Migration(migrations.Migration):
dependencies = [
('modeling', '0023_fix_gis_data_serialization'),
]
operations = [
migrations.RunPython(fix_gis_data_serialization,
migrations.RunPython.noop)
]
|
# -*- coding: utf-8 -*-
from datetime import datetime
from django.db import migrations
from django.utils.timezone import make_aware
def fix_gis_data_serialization(apps, schema_editor):
"""
Release 1.20.0 introduced a change which let the project "gis_data"
field get updated by modifications on a scenario. This effectively
meant that modifications were being applied to all scenarios and that
removing them did not actually remove their effect from the gwlf-e
input. For projects that were created and suffered from that bug,
clearing out the gis_data on Project and the results on Scenario
will force them to be recomputed with the fix applied.
"""
Project = apps.get_model('modeling', 'Project')
bug_released_date = make_aware(datetime.fromisoformat('2017-10-17'))
# Apply fix to Multi-Year projects created after the release
for project in Project.objects.filter(created_at__gte=bug_released_date,
model_package='gwlfe'):
project.gis_data = None
for scenario in project.scenarios.all():
scenario.results = None
scenario.save()
project.save()
class Migration(migrations.Migration):
dependencies = [
('modeling', '0023_fix_gis_data_serialization'),
]
operations = [
migrations.RunPython(fix_gis_data_serialization,
migrations.RunPython.noop)
]
|
Update old migration with tz-aware dates
|
Update old migration with tz-aware dates
This won't have a real affect since this migration has been run
years ago. However, it was causing RuntimeWarnings in Django, saying
that a DateTimeField received a naive datetime.
By wrapping it in Django's make_aware, the correct timezone is
applied.
|
Python
|
apache-2.0
|
WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed
|
---
+++
@@ -1,5 +1,8 @@
# -*- coding: utf-8 -*-
+from datetime import datetime
+
from django.db import migrations
+from django.utils.timezone import make_aware
def fix_gis_data_serialization(apps, schema_editor):
@@ -14,7 +17,7 @@
"""
Project = apps.get_model('modeling', 'Project')
- bug_released_date = '2017-10-17'
+ bug_released_date = make_aware(datetime.fromisoformat('2017-10-17'))
# Apply fix to Multi-Year projects created after the release
for project in Project.objects.filter(created_at__gte=bug_released_date,
|
15463168ed715761eaf483a1e53eb74d92b83e04
|
tests.py
|
tests.py
|
import unittest
import fuckit_commit
class Fuckit_CommitTestCase(unittest.TestCase):
'''
Unit Test cases for fuckit_commit
'''
def setUp(self):
pass
def test_send_sms(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
import unittest
import json
from twilio.rest import TwilioRestClient
import fuckit_commit
class Fuckit_CommitTestCase(unittest.TestCase):
'''
Unit Test cases for fuckit_commit
'''
def setUp(self):
with open('configuration.json') as f:
self.config = json.load(f)
def test_send_sms(self):
client = TwilioRestClient(self.config['twilio']['sid'], self.config['twilio']['auth_token'])
message = client.messages.create(to=self.config['twilio']["number_to"], from_=self.config['twilio']["number_from"],
body="You need to commit today!!!\nFuck it!!! Commit!!!")
self.assertEqual(message.account_sid, self.config['twilio']['sid'])
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
Add test to send sms
|
Add test to send sms
|
Python
|
mit
|
ueg1990/fuckit_commit
|
---
+++
@@ -1,4 +1,7 @@
import unittest
+import json
+from twilio.rest import TwilioRestClient
+
import fuckit_commit
class Fuckit_CommitTestCase(unittest.TestCase):
@@ -6,10 +9,14 @@
Unit Test cases for fuckit_commit
'''
def setUp(self):
- pass
+ with open('configuration.json') as f:
+ self.config = json.load(f)
def test_send_sms(self):
- pass
+ client = TwilioRestClient(self.config['twilio']['sid'], self.config['twilio']['auth_token'])
+ message = client.messages.create(to=self.config['twilio']["number_to"], from_=self.config['twilio']["number_from"],
+ body="You need to commit today!!!\nFuck it!!! Commit!!!")
+ self.assertEqual(message.account_sid, self.config['twilio']['sid'])
def tearDown(self):
pass
|
1902ed44f41eabf1c8207e47d5c31dd58471146f
|
pymunk/transform.py
|
pymunk/transform.py
|
from typing import NamedTuple
class Transform(NamedTuple):
"""Type used for 2x3 affine transforms.
See wikipedia for details:
http://en.wikipedia.org/wiki/Affine_transformation
The properties map to the matrix in this way:
= = ==
= = ==
a c tx
b d ty
= = ==
An instance can be created in this way::
>>> Transform(1,2,3,4,5,6)
Transform(a=1, b=2, c=3, d=4, tx=5, ty=6)
Or using the default identity in this way::
>>> Transform.identity()
Transform(a=1, b=0, c=0, d=1, tx=0, ty=0)
Or overriding only some of the values (on a identity matrix):
>>> Transform(b=3,ty=5)
Transform(a=1, b=3, c=0, d=1, tx=0, ty=5)
"""
a: float = 1
b: float = 0
c: float = 0
d: float = 1
tx: float = 0
ty: float = 0
@staticmethod
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
|
import math
from typing import NamedTuple
class Transform(NamedTuple):
"""Type used for 2x3 affine transforms.
See wikipedia for details:
http://en.wikipedia.org/wiki/Affine_transformation
The properties map to the matrix in this way:
= = ==
= = ==
a c tx
b d ty
= = ==
An instance can be created in this way::
>>> Transform(1,2,3,4,5,6)
Transform(a=1, b=2, c=3, d=4, tx=5, ty=6)
Or using the default identity in this way::
>>> Transform.identity()
Transform(a=1, b=0, c=0, d=1, tx=0, ty=0)
Or overriding only some of the values (on a identity matrix):
>>> Transform(b=3,ty=5)
Transform(a=1, b=3, c=0, d=1, tx=0, ty=5)
"""
a: float = 1
b: float = 0
c: float = 0
d: float = 1
tx: float = 0
ty: float = 0
@staticmethod
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
@staticmethod
def translation(x, y) -> "Transform":
return Transform(tx=x, ty=y)
# split into scale and scale_non-uniform
@staticmethod
def scaling(v) -> "Transform":
return Transform(a=v, d=v)
@staticmethod
def rotation(t) -> "Transform":
c = math.cos(t)
s = math.sin(t)
return Transform(a=c, b=s, c=-s, d=c)
|
Add some helper methods to create translate, scale and rotate Transforms.
|
Add some helper methods to create translate, scale and rotate Transforms.
|
Python
|
mit
|
viblo/pymunk,viblo/pymunk
|
---
+++
@@ -1,3 +1,4 @@
+import math
from typing import NamedTuple
@@ -39,3 +40,18 @@
def identity() -> "Transform":
"""The identity transform"""
return Transform(1, 0, 0, 1, 0, 0)
+
+ @staticmethod
+ def translation(x, y) -> "Transform":
+ return Transform(tx=x, ty=y)
+
+ # split into scale and scale_non-uniform
+ @staticmethod
+ def scaling(v) -> "Transform":
+ return Transform(a=v, d=v)
+
+ @staticmethod
+ def rotation(t) -> "Transform":
+ c = math.cos(t)
+ s = math.sin(t)
+ return Transform(a=c, b=s, c=-s, d=c)
|
feb88aa30b362e02671d51d8b3e03a7194d99646
|
kobra/urls.py
|
kobra/urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from django.contrib import admin
from .views import web_client_view
urlpatterns = [
# url(r'^', include('kobra.api.v1.urls', namespace='legacy')),
url(r'^api/v1/', include('kobra.api.v1.urls', namespace='v1')),
url(r'^admin/', include(admin.site.urls)),
# Matches everything and therefore must come last.
url(r'^', include([
url(r'^$', web_client_view, name='home'),
url(r'^.*/$', web_client_view)
], namespace='web-client'))
]
|
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from django.contrib import admin
from .views import web_client_view
urlpatterns = [
# url(r'^', include('kobra.api.v1.urls', namespace='legacy')),
url(r'^api/v1/', include('kobra.api.v1.urls', namespace='v1')),
url(r'^admin/', include(admin.site.urls)),
# Matches everything* and therefore must come last.
# *everything except /static/... since this breaks the static file serving.
url(r'^(?!static/)', include([
url(r'^$', web_client_view, name='home'),
url(r'^.*/$', web_client_view)
], namespace='web-client'))
]
|
Fix for broken static file serving
|
Fix for broken static file serving
|
Python
|
mit
|
karservice/kobra,karservice/kobra,karservice/kobra,karservice/kobra
|
---
+++
@@ -11,8 +11,9 @@
url(r'^admin/', include(admin.site.urls)),
- # Matches everything and therefore must come last.
- url(r'^', include([
+ # Matches everything* and therefore must come last.
+ # *everything except /static/... since this breaks the static file serving.
+ url(r'^(?!static/)', include([
url(r'^$', web_client_view, name='home'),
url(r'^.*/$', web_client_view)
], namespace='web-client'))
|
459916c800f09e7600ae7442bb34236b9f418f53
|
feedhq/utils.py
|
feedhq/utils.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.validators import EmailValidator, ValidationError
import redis
def get_redis_connection():
"""
Helper used for obtain a raw redis client.
"""
from redis_cache.cache import pool
connection_pool = pool.get_connection_pool(
parser_class=redis.connection.HiredisParser,
connection_pool_class=redis.ConnectionPool,
connection_pool_class_kwargs={},
**settings.REDIS)
return redis.Redis(connection_pool=connection_pool, **settings.REDIS)
def is_email(value):
try:
EmailValidator()(value)
except ValidationError:
return False
else:
return True
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.validators import EmailValidator, ValidationError
import redis
def get_redis_connection():
"""
Helper used for obtain a raw redis client.
"""
from redis_cache.cache import pool
client = redis.Redis(**settings.REDIS)
client.connection_pool = pool.get_connection_pool(
client,
parser_class=redis.connection.HiredisParser,
connection_pool_class=redis.ConnectionPool,
connection_pool_class_kwargs={},
**settings.REDIS)
return client
def is_email(value):
try:
EmailValidator()(value)
except ValidationError:
return False
else:
return True
|
Update method of getting redis connection
|
Update method of getting redis connection
|
Python
|
bsd-3-clause
|
feedhq/feedhq,rmoorman/feedhq,rmoorman/feedhq,rmoorman/feedhq,rmoorman/feedhq,feedhq/feedhq,feedhq/feedhq,feedhq/feedhq,feedhq/feedhq,rmoorman/feedhq
|
---
+++
@@ -10,12 +10,14 @@
Helper used for obtain a raw redis client.
"""
from redis_cache.cache import pool
- connection_pool = pool.get_connection_pool(
+ client = redis.Redis(**settings.REDIS)
+ client.connection_pool = pool.get_connection_pool(
+ client,
parser_class=redis.connection.HiredisParser,
connection_pool_class=redis.ConnectionPool,
connection_pool_class_kwargs={},
**settings.REDIS)
- return redis.Redis(connection_pool=connection_pool, **settings.REDIS)
+ return client
def is_email(value):
|
9ad85436f2b47aa76246eccc8b9fc4361db86f4f
|
xmantissa/examples/autoapp.py
|
xmantissa/examples/autoapp.py
|
from axiom.store import Store
from axiom.userbase import LoginSystem
from xmantissa.webadmin import DeveloperApplication, DONTUSETHISBenefactor
from xmantissa.webapp import PrivateApplication
from xmantissa.website import WebSite
from xmantissa.signup import FreeTicketSignup, TicketBooth
s = Store("test.axiom", debug=True)
def _():
ls = LoginSystem(store=s)
ls.install()
s.checkpoint()
WebSite(store=s, portno=8080).install()
la = ls.addAccount('admin', 'localhost', 'password')
s2 = la.avatars.open()
LoginSystem(store=s2).install()
WebSite(store=s2).install()
PrivateApplication(store=s2).install()
DeveloperApplication(store=s2).install()
brok = ls.addAccount('broken', 'localhost', 'password')
s3 = brok.avatars.open()
LoginSystem(store=s3).install()
WebSite(store=s3).install()
PrivateApplication(store=s3).install()
# MECHANISM
bth = TicketBooth(store=s)
bth.install()
# POLICY
ben = DONTUSETHISBenefactor(store=s)
fre = FreeTicketSignup(store=s,
benefactor=ben,
prefixURL=u'admin-signup',
booth=bth)
fre.install()
s.transact(_)
|
from axiom.store import Store
from axiom.userbase import LoginSystem
from xmantissa.webadmin import DeveloperSite, DeveloperApplication, DONTUSETHISBenefactor
from xmantissa.webapp import PrivateApplication
from xmantissa.website import WebSite
from xmantissa.signup import FreeTicketSignup, TicketBooth
s = Store("test.axiom", debug=True)
def _():
ls = LoginSystem(store=s)
ls.install()
s.checkpoint()
WebSite(store=s, portno=8080).install()
DeveloperSite(store=s).install()
la = ls.addAccount('admin', 'localhost', 'password')
s2 = la.avatars.open()
LoginSystem(store=s2).install()
WebSite(store=s2).install()
PrivateApplication(store=s2).install()
DeveloperApplication(store=s2).install()
brok = ls.addAccount('broken', 'localhost', 'password')
s3 = brok.avatars.open()
LoginSystem(store=s3).install()
WebSite(store=s3).install()
PrivateApplication(store=s3).install()
# MECHANISM
bth = TicketBooth(store=s)
bth.install()
# POLICY
ben = DONTUSETHISBenefactor(store=s)
fre = FreeTicketSignup(store=s,
benefactor=ben,
prefixURL=u'admin-signup',
booth=bth)
fre.install()
s.transact(_)
|
Install DeveloperSite on the main store so the repl can workish
|
Install DeveloperSite on the main store so the repl can workish
|
Python
|
mit
|
twisted/mantissa,twisted/mantissa,twisted/mantissa
|
---
+++
@@ -2,7 +2,7 @@
from axiom.store import Store
from axiom.userbase import LoginSystem
-from xmantissa.webadmin import DeveloperApplication, DONTUSETHISBenefactor
+from xmantissa.webadmin import DeveloperSite, DeveloperApplication, DONTUSETHISBenefactor
from xmantissa.webapp import PrivateApplication
from xmantissa.website import WebSite
from xmantissa.signup import FreeTicketSignup, TicketBooth
@@ -13,6 +13,7 @@
ls.install()
s.checkpoint()
WebSite(store=s, portno=8080).install()
+ DeveloperSite(store=s).install()
la = ls.addAccount('admin', 'localhost', 'password')
s2 = la.avatars.open()
|
cb30232b201934622efb2f972cca5087a1373cf7
|
src/waldur_mastermind/marketplace_remote/extension.py
|
src/waldur_mastermind/marketplace_remote/extension.py
|
from waldur_core.core import WaldurExtension
class MarketplaceRemoteExtension(WaldurExtension):
@staticmethod
def django_app():
return 'waldur_mastermind.marketplace_remote'
@staticmethod
def is_assembly():
return True
@staticmethod
def django_urls():
from .urls import urlpatterns
return urlpatterns
@staticmethod
def celery_tasks():
from datetime import timedelta
return {
'waldur-remote-pull-offerings': {
'task': 'waldur_mastermind.marketplace_remote.pull_offerings',
'schedule': timedelta(minutes=60),
'args': (),
},
'waldur-remote-pull-order-items': {
'task': 'waldur_mastermind.marketplace_remote.pull_order_items',
'schedule': timedelta(minutes=60),
'args': (),
},
'waldur-remote-pull-usage': {
'task': 'waldur_mastermind.marketplace_remote.pull_usage',
'schedule': timedelta(minutes=60),
'args': (),
},
'waldur-remote-sync-remote-project-permissions': {
'task': 'waldur_mastermind.marketplace_remote.sync_remote_project_permissions',
'schedule': timedelta(hours=6),
'args': (),
},
'waldur-remote-pull-invoices': {
'task': 'waldur_mastermind.marketplace_remote.pull_invoices',
'schedule': timedelta(minutes=60),
'args': (),
},
}
|
from waldur_core.core import WaldurExtension
class MarketplaceRemoteExtension(WaldurExtension):
@staticmethod
def django_app():
return 'waldur_mastermind.marketplace_remote'
@staticmethod
def is_assembly():
return True
@staticmethod
def django_urls():
from .urls import urlpatterns
return urlpatterns
@staticmethod
def celery_tasks():
from datetime import timedelta
return {
'waldur-remote-pull-offerings': {
'task': 'waldur_mastermind.marketplace_remote.pull_offerings',
'schedule': timedelta(minutes=60),
'args': (),
},
'waldur-remote-pull-order-items': {
'task': 'waldur_mastermind.marketplace_remote.pull_order_items',
'schedule': timedelta(minutes=5),
'args': (),
},
'waldur-remote-pull-usage': {
'task': 'waldur_mastermind.marketplace_remote.pull_usage',
'schedule': timedelta(minutes=60),
'args': (),
},
'waldur-remote-sync-remote-project-permissions': {
'task': 'waldur_mastermind.marketplace_remote.sync_remote_project_permissions',
'schedule': timedelta(hours=6),
'args': (),
},
'waldur-remote-pull-invoices': {
'task': 'waldur_mastermind.marketplace_remote.pull_invoices',
'schedule': timedelta(minutes=60),
'args': (),
},
}
|
Increase frequency of order items pulling.
|
Increase frequency of order items pulling.
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
|
---
+++
@@ -28,7 +28,7 @@
},
'waldur-remote-pull-order-items': {
'task': 'waldur_mastermind.marketplace_remote.pull_order_items',
- 'schedule': timedelta(minutes=60),
+ 'schedule': timedelta(minutes=5),
'args': (),
},
'waldur-remote-pull-usage': {
|
2c1282680bc9d84e37c40923e8ca288bf8547998
|
fabfile/daemons.py
|
fabfile/daemons.py
|
#!/usr/bin/env python
from time import sleep, time
from fabric.api import execute, task, env
import app_config
import sys
import traceback
def safe_execute(*args, **kwargs):
try:
execute(*args, **kwargs)
except:
print "ERROR [timestamp: %d]: Here's the traceback" % time()
ex_type, ex, tb = sys.exc_info()
traceback.print_tb(tb)
del tb
@task
def deploy():
"""
Harvest data and deploy slides indefinitely
"""
while True:
safe_execute('ap.update')
safe_execute('data.load_updates', 'data/update.json')
safe_execute('liveblog.update')
safe_execute('deploy_slides')
safe_execute('deploy_big_boards')
sleep(app_config.DEPLOY_INTERVAL)
|
#!/usr/bin/env python
from time import sleep, time
from fabric.api import execute, task, env
import app_config
import sys
import traceback
def safe_execute(*args, **kwargs):
"""
Wrap execute() so that all exceptions are caught and logged.
"""
try:
execute(*args, **kwargs)
except:
print "ERROR [timestamp: %d]: Here's the traceback" % time()
ex_type, ex, tb = sys.exc_info()
traceback.print_tb(tb)
del tb
@task
def deploy():
"""
Harvest data and deploy slides indefinitely
"""
while True:
safe_execute('ap.update')
safe_execute('data.load_updates', 'data/update.json')
safe_execute('liveblog.update')
safe_execute('deploy_slides')
safe_execute('deploy_big_boards')
sleep(app_config.DEPLOY_INTERVAL)
|
Add comment to new safe_execute function
|
Add comment to new safe_execute function
|
Python
|
mit
|
nprapps/elections14,nprapps/elections14,nprapps/elections14,nprapps/elections14
|
---
+++
@@ -8,6 +8,9 @@
import traceback
def safe_execute(*args, **kwargs):
+ """
+ Wrap execute() so that all exceptions are caught and logged.
+ """
try:
execute(*args, **kwargs)
except:
|
b89c94cb55db1d8252b75949b5cba919e0b69a6e
|
skeleton/website/jasyscript.py
|
skeleton/website/jasyscript.py
|
import konstrukteur.Konstrukteur
@task
def build(regenerate = False):
"""Generate source (development) version"""
konstrukteur.Konstrukteur.build(regenerate)
|
import konstrukteur.Konstrukteur
@task
def build(regenerate = False):
"""Generate source (development) version"""
# Initialize assets
AssetManager.AssetManager(profile, session)
# Build static website
konstrukteur.Konstrukteur.build(regenerate)
|
Fix asset loading in skeleton
|
Fix asset loading in skeleton
|
Python
|
mit
|
fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur
|
---
+++
@@ -4,4 +4,8 @@
def build(regenerate = False):
"""Generate source (development) version"""
+ # Initialize assets
+ AssetManager.AssetManager(profile, session)
+
+ # Build static website
konstrukteur.Konstrukteur.build(regenerate)
|
909f36eecdf38f0915f945144966c892e09670ff
|
src/logger.py
|
src/logger.py
|
#
# License: MIT (doc/LICENSE)
# Author: Todd Gaunt
#
# File: imgfetch/fourchan.py
# This file contains the logging functions for writing to stdout stderr etc...
from sys import stderr
PROGRAM_NAME = "imgfetch: "
def error(level, msg):
global PROGRAM_NAME
if level < 0:
errmsg=PROGRAM_NAME + "error: internal error"
if level >= 0:
errmsg=PROGRAM_NAME + "error: " + msg
print(errmsg, file=stderr)
if level >= 1 or level < 0:
quit()
def warning(level, msg):
global PROGRAM_NAME
if level < 0:
error(-1, "")
if level >= 0:
warnmsg=PROGRAM_NAME + "warning: " + msg
print(warnmsg)
def output(level, msg):
global PROGRAM_NAME
if level < 0:
error(-1,"")
if level == 0:
return
elif level >= 1:
outmsg = PROGRAM_NAME + msg
print(outmsg)
# End of File
|
#
# License: MIT (doc/LICENSE)
# Author: Todd Gaunt
from sys import stderr
PROGRAM_NAME = "imgfetch: "
def error(level, msg):
global PROGRAM_NAME
if level < 0:
quit()
if level >= 0:
errmsg=PROGRAM_NAME + "error: " + msg
print(errmsg, file=stderr)
quit()
def warning(level, msg):
global PROGRAM_NAME
if level < 0:
error(-1, "")
elif level == 0:
return
elif level >= 1:
nmsg=PROGRAM_NAME + "warning: " + msg
print(nmsg)
def output(level, msg):
global PROGRAM_NAME
if level < 0:
error(-1,"")
elif level == 0:
return
elif level >= 1:
nmsg = PROGRAM_NAME + msg
print(nmsg)
# End of File
|
Update level checks to allow a verbosity level of 0 or greater
|
Update level checks to allow a verbosity level of 0 or greater
|
Python
|
isc
|
toddgaunt/imgfetch
|
---
+++
@@ -1,9 +1,6 @@
#
# License: MIT (doc/LICENSE)
# Author: Todd Gaunt
-#
-# File: imgfetch/fourchan.py
-# This file contains the logging functions for writing to stdout stderr etc...
from sys import stderr
@@ -12,32 +9,33 @@
def error(level, msg):
global PROGRAM_NAME
if level < 0:
- errmsg=PROGRAM_NAME + "error: internal error"
+ quit()
if level >= 0:
errmsg=PROGRAM_NAME + "error: " + msg
print(errmsg, file=stderr)
- if level >= 1 or level < 0:
- quit()
+ quit()
def warning(level, msg):
global PROGRAM_NAME
if level < 0:
error(-1, "")
- if level >= 0:
- warnmsg=PROGRAM_NAME + "warning: " + msg
+ elif level == 0:
+ return
+ elif level >= 1:
+ nmsg=PROGRAM_NAME + "warning: " + msg
- print(warnmsg)
+ print(nmsg)
def output(level, msg):
global PROGRAM_NAME
if level < 0:
error(-1,"")
- if level == 0:
+ elif level == 0:
return
elif level >= 1:
- outmsg = PROGRAM_NAME + msg
+ nmsg = PROGRAM_NAME + msg
- print(outmsg)
+ print(nmsg)
# End of File
|
3a571e45e0bb0e11d84f5e0013d5a5f0f2a568ec
|
c2corg_ui/views/index.py
|
c2corg_ui/views/index.py
|
from pyramid.view import view_config
class Pages(object):
def __init__(self, request):
self.request = request
self.settings = request.registry.settings
self.template_input = {
'debug': 'debug' in self.request.params,
'api_url': self.settings['api_url'],
'ign_api_key': self.settings['ign_api_key'],
'bing_api_key': self.settings['bing_api_key'],
'image_backend_url': self.settings['image_backend_url'],
'image_url': self.settings['image_url']
}
@view_config(route_name='index', renderer='c2corg_ui:templates/index.html')
@view_config(route_name='auth', renderer='c2corg_ui:templates/auth.html')
@view_config(
route_name='account',
renderer='c2corg_ui:templates/account.html')
def index(self):
return self.template_input
|
from pyramid.view import view_config
from c2corg_ui.views import get_or_create_page
class Pages(object):
def __init__(self, request):
self.request = request
self.settings = request.registry.settings
self.debug = 'debug' in self.request.params
self.template_input = {
'debug': self.debug,
'api_url': self.settings['api_url'],
'ign_api_key': self.settings['ign_api_key'],
'bing_api_key': self.settings['bing_api_key'],
'image_backend_url': self.settings['image_backend_url'],
'image_url': self.settings['image_url']
}
@view_config(route_name='index')
def index(self):
return self._get_page('index', 'c2corg_ui:templates/index.html')
@view_config(route_name='auth')
def auth(self):
return self._get_page('auth', 'c2corg_ui:templates/auth.html')
@view_config(route_name='account')
def account(self):
return self._get_page('account', 'c2corg_ui:templates/account.html')
def _get_page(self, page_key, template):
return get_or_create_page(
page_key,
template,
self.template_input,
self.request,
self.debug
)
|
Set up caching/etag for static pages
|
Set up caching/etag for static pages
|
Python
|
agpl-3.0
|
c2corg/v6_ui,Courgetteandratatouille/v6_ui,Courgetteandratatouille/v6_ui,Courgetteandratatouille/v6_ui,c2corg/v6_ui,Courgetteandratatouille/v6_ui,c2corg/v6_ui,c2corg/v6_ui,olaurendeau/v6_ui,olaurendeau/v6_ui,olaurendeau/v6_ui,olaurendeau/v6_ui
|
---
+++
@@ -1,4 +1,6 @@
from pyramid.view import view_config
+
+from c2corg_ui.views import get_or_create_page
class Pages(object):
@@ -6,8 +8,9 @@
def __init__(self, request):
self.request = request
self.settings = request.registry.settings
+ self.debug = 'debug' in self.request.params
self.template_input = {
- 'debug': 'debug' in self.request.params,
+ 'debug': self.debug,
'api_url': self.settings['api_url'],
'ign_api_key': self.settings['ign_api_key'],
'bing_api_key': self.settings['bing_api_key'],
@@ -15,10 +18,23 @@
'image_url': self.settings['image_url']
}
- @view_config(route_name='index', renderer='c2corg_ui:templates/index.html')
- @view_config(route_name='auth', renderer='c2corg_ui:templates/auth.html')
- @view_config(
- route_name='account',
- renderer='c2corg_ui:templates/account.html')
+ @view_config(route_name='index')
def index(self):
- return self.template_input
+ return self._get_page('index', 'c2corg_ui:templates/index.html')
+
+ @view_config(route_name='auth')
+ def auth(self):
+ return self._get_page('auth', 'c2corg_ui:templates/auth.html')
+
+ @view_config(route_name='account')
+ def account(self):
+ return self._get_page('account', 'c2corg_ui:templates/account.html')
+
+ def _get_page(self, page_key, template):
+ return get_or_create_page(
+ page_key,
+ template,
+ self.template_input,
+ self.request,
+ self.debug
+ )
|
8ab21dc5148c43a5ba9473c03e3fb5baaf686ed6
|
tests/settings/test-sqlite.py
|
tests/settings/test-sqlite.py
|
# Test configuration for quick execution.
#
# This settings file will not work for tests against
# Django 1.6, as it does not support Auto incrementing primary
# keys in way required by django-name.
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'TEST_NAME': ':memory:',
}
}
|
# Test configuration for quick execution.
#
# This settings file will not work for tests against
# Django 1.6, as it does not support Auto incrementing primary
# keys in way required by django-name.
from .base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
|
Change db name for django1.8.
|
Change db name for django1.8.
|
Python
|
bsd-3-clause
|
damonkelley/django-name,unt-libraries/django-name,damonkelley/django-name,unt-libraries/django-name,damonkelley/django-name,unt-libraries/django-name
|
---
+++
@@ -9,6 +9,6 @@
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
- 'TEST_NAME': ':memory:',
+ 'NAME': ':memory:',
}
}
|
d1628356c7981748e2446c7b43d33d21cdef7e02
|
geoengine_partner/geo_partner.py
|
geoengine_partner/geo_partner.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2011-2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from base_geoengine import geo_model
class ResPartner(geo_model.GeoModel):
"""Add geo_point to partner using a function filed"""
_name = "res.partner"
_inherit = "res.partner"
_columns = {
'geo_point': fields.geo_point('Addresses coordinate')
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2011-2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from openerp.addons.base_geoengine import geo_model
class ResPartner(geo_model.GeoModel):
"""Add geo_point to partner using a function filed"""
_name = "res.partner"
_inherit = "res.partner"
_columns = {
'geo_point': fields.geo_point('Addresses coordinate')
}
|
Use absolute imports on opnerp.addons
|
[FIX] Use absolute imports on opnerp.addons
|
Python
|
agpl-3.0
|
OCA/geospatial,OCA/geospatial,OCA/geospatial
|
---
+++
@@ -20,7 +20,7 @@
##############################################################################
from openerp.osv import fields
-from base_geoengine import geo_model
+from openerp.addons.base_geoengine import geo_model
class ResPartner(geo_model.GeoModel):
|
8f5849a90c63c82b036e21d36b9d77b20e1aa60b
|
src/pretix/testutils/settings.py
|
src/pretix/testutils/settings.py
|
import atexit
import os
import tempfile
tmpdir = tempfile.TemporaryDirectory()
os.environ.setdefault('DATA_DIR', tmpdir.name)
from pretix.settings import * # NOQA
DATA_DIR = tmpdir.name
LOG_DIR = os.path.join(DATA_DIR, 'logs')
MEDIA_ROOT = os.path.join(DATA_DIR, 'media')
atexit.register(tmpdir.cleanup)
EMAIL_BACKEND = 'django.core.mail.outbox'
COMPRESS_ENABLED = COMPRESS_OFFLINE = False
DEBUG = True
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
# Disable celery
CELERY_ALWAYS_EAGER = True
HAS_CELERY = False
# Don't use redis
SESSION_ENGINE = "django.contrib.sessions.backends.db"
HAS_REDIS = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
|
import atexit
import os
import tempfile
tmpdir = tempfile.TemporaryDirectory()
os.environ.setdefault('DATA_DIR', tmpdir.name)
from pretix.settings import * # NOQA
DATA_DIR = tmpdir.name
LOG_DIR = os.path.join(DATA_DIR, 'logs')
MEDIA_ROOT = os.path.join(DATA_DIR, 'media')
atexit.register(tmpdir.cleanup)
EMAIL_BACKEND = 'django.core.mail.outbox'
COMPRESS_ENABLED = COMPRESS_OFFLINE = False
DEBUG = True
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
# Disable celery
CELERY_ALWAYS_EAGER = True
HAS_CELERY = False
# Don't use redis
SESSION_ENGINE = "django.contrib.sessions.backends.db"
HAS_REDIS = False
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:'
}
}
|
Test on SQLite if not configured otherwise
|
Test on SQLite if not configured otherwise
|
Python
|
apache-2.0
|
Flamacue/pretix,Flamacue/pretix,Flamacue/pretix,Flamacue/pretix
|
---
+++
@@ -33,3 +33,10 @@
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
+
+DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.sqlite3',
+ 'NAME': ':memory:'
+ }
+}
|
7d7209bbe92045c3becc770cee004bcdc9640098
|
heltour/tournament/decorators.py
|
heltour/tournament/decorators.py
|
from cacheops.query import cached_as, \
cached_view_as as _cacheops_cached_view_as, \
install_cacheops
# TODO: This should be run automatically by django. I have no idea why it isn't.
install_cacheops()
# Modify the cacheops.cached_view_as decorator to take a "vary_request" lambda
# that allows us to serve different copies of the view to different types of users
# e.g. logged-in vs anonymous users
def cached_view_as(*cva_args, **cva_kwargs):
vary_request = cva_kwargs.pop('vary_request', None)
def wrap(func):
def proxy(request, vary_value, *proxy_args, **proxy_kwargs):
return func(request, *proxy_args, **proxy_kwargs)
wrapped_proxy = _cacheops_cached_view_as(*cva_args, **cva_kwargs)(proxy)
def wrapped(request, *args, **kwargs):
if vary_request is None:
return wrapped_proxy(request, None, *args, **kwargs)
else:
return wrapped_proxy(request, vary_request(request), *args, **kwargs)
return wrapped
return wrap
|
from cacheops.query import cached_as, \
cached_view_as as _cacheops_cached_view_as, \
install_cacheops
from heltour import settings
# TODO: This should be run automatically by django. I have no idea why it isn't.
install_cacheops()
# Modify the cacheops.cached_view_as decorator to take a "vary_request" lambda
# that allows us to serve different copies of the view to different types of users
# e.g. logged-in vs anonymous users
def cached_view_as(*cva_args, **cva_kwargs):
vary_request = cva_kwargs.pop('vary_request', None)
def wrap(func):
if settings.DEBUG:
# Disable view caching during development
return func
def proxy(request, vary_value, *proxy_args, **proxy_kwargs):
return func(request, *proxy_args, **proxy_kwargs)
wrapped_proxy = _cacheops_cached_view_as(*cva_args, **cva_kwargs)(proxy)
def wrapped(request, *args, **kwargs):
if vary_request is None:
return wrapped_proxy(request, None, *args, **kwargs)
else:
return wrapped_proxy(request, vary_request(request), *args, **kwargs)
return wrapped
return wrap
|
Disable view caching during development
|
Disable view caching during development
Not ideal, but useful when you change the source code. Otherwise you
would have to run "manage.py invalidate all" for every change.
|
Python
|
mit
|
cyanfish/heltour,cyanfish/heltour,cyanfish/heltour,cyanfish/heltour
|
---
+++
@@ -1,6 +1,7 @@
from cacheops.query import cached_as, \
cached_view_as as _cacheops_cached_view_as, \
install_cacheops
+from heltour import settings
# TODO: This should be run automatically by django. I have no idea why it isn't.
install_cacheops()
@@ -13,6 +14,10 @@
vary_request = cva_kwargs.pop('vary_request', None)
def wrap(func):
+ if settings.DEBUG:
+ # Disable view caching during development
+ return func
+
def proxy(request, vary_value, *proxy_args, **proxy_kwargs):
return func(request, *proxy_args, **proxy_kwargs)
|
2403cbe2aa8f515bdd8f575112478010389ee48b
|
conan/ConanServerToArtifactory/migrate.py
|
conan/ConanServerToArtifactory/migrate.py
|
import os
import subprocess
def run(cmd):
ret = os.system(cmd)
if ret != 0:
raise Exception("Command failed: %s" % cmd)
# Assuming local = conan_server and Artifactory remotes
output = subprocess.check_output("conan search -r=local --raw")
packages = output.splitlines()
for package in packages:
print("Downloading %s" % package)
run("conan download %s -r=local" % package)
run("conan upload * --all --confirm -r=artifactory")
|
import os
import subprocess
def run(cmd):
ret = os.system(cmd)
if ret != 0:
raise Exception("Command failed: %s" % cmd)
# Assuming local = conan_server and Artifactory remotes
output = subprocess.check_output("conan search * --remote=local --raw")
packages = output.decode("utf-8").splitlines()
for package in packages[:1]:
print("Downloading %s" % package)
run("conan download {} --remote=local".format(package))
run("conan upload * --all --confirm -r=artifactory")
|
Update Conan server migration script
|
Update Conan server migration script
|
Python
|
apache-2.0
|
JFrogDev/artifactory-scripts,JFrogDev/artifactory-scripts,JFrogDev/artifactory-scripts,JFrogDev/artifactory-scripts,JFrogDev/artifactory-scripts,JFrogDev/artifactory-scripts
|
---
+++
@@ -8,11 +8,11 @@
raise Exception("Command failed: %s" % cmd)
# Assuming local = conan_server and Artifactory remotes
-output = subprocess.check_output("conan search -r=local --raw")
-packages = output.splitlines()
+output = subprocess.check_output("conan search * --remote=local --raw")
+packages = output.decode("utf-8").splitlines()
-for package in packages:
+for package in packages[:1]:
print("Downloading %s" % package)
- run("conan download %s -r=local" % package)
+ run("conan download {} --remote=local".format(package))
run("conan upload * --all --confirm -r=artifactory")
|
fd77e3211e2298457b9778f409c56c70a36bf3db
|
farmers_api/farmers/views.py
|
farmers_api/farmers/views.py
|
from rest_framework import viewsets
from .models import Farmer
from .serializers import FarmerSerializer
class FarmerViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Farmer.objects.all()
serializer_class = FarmerSerializer
filter_fields = ('town',)
|
from rest_framework import viewsets, permissions
from .models import Farmer
from .serializers import FarmerSerializer
class FarmerViewSet(viewsets.ModelViewSet):
queryset = Farmer.objects.all()
serializer_class = FarmerSerializer
filter_fields = ('town',)
permissions = permissions.DjangoModelPermissionsOrAnonReadOnly
|
Add permission settings on FarmerViewSet
|
Add permission settings on FarmerViewSet
|
Python
|
bsd-2-clause
|
tm-kn/farmers-api
|
---
+++
@@ -1,10 +1,11 @@
-from rest_framework import viewsets
+from rest_framework import viewsets, permissions
from .models import Farmer
from .serializers import FarmerSerializer
-class FarmerViewSet(viewsets.ReadOnlyModelViewSet):
+class FarmerViewSet(viewsets.ModelViewSet):
queryset = Farmer.objects.all()
serializer_class = FarmerSerializer
filter_fields = ('town',)
+ permissions = permissions.DjangoModelPermissionsOrAnonReadOnly
|
4cec5250a3f9058fea5af5ef432a5b230ca94963
|
images/singleuser/user-config.py
|
images/singleuser/user-config.py
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
for fam in (
'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia',
'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity',
'wikidata', 'mediawiki'
):
usernames[fam]['*'] = os.environ['USER']
del fam
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'rb') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
for fam in (
'wikipedia', 'commons', 'meta', 'wikiboots', 'wikimedia',
'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity',
'wikidata', 'mediawiki'
):
usernames[fam]['*'] = os.environ['USER']
if 'ACCESS_KEY' in os.environ:
# If OAuth integration is available, take it
authenticate[fam]['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
del fam
|
Update to use newer oauth style
|
Update to use newer oauth style
|
Python
|
mit
|
yuvipanda/paws,yuvipanda/paws
|
---
+++
@@ -20,15 +20,16 @@
'wikiquote', 'wikisource', 'wikisource', 'wiktionary', 'wikiversity',
'wikidata', 'mediawiki'
):
+
usernames[fam]['*'] = os.environ['USER']
+ if 'ACCESS_KEY' in os.environ:
+ # If OAuth integration is available, take it
+ authenticate[fam]['*'] = (
+ os.environ['CLIENT_ID'],
+ os.environ['CLIENT_SECRET'],
+ os.environ['ACCESS_KEY'],
+ os.environ['ACCESS_SECRET']
+ )
+
del fam
-
-# If OAuth integration is available, take it
-if 'CLIENT_ID' in os.environ:
- authenticate['*'] = (
- os.environ['CLIENT_ID'],
- os.environ['CLIENT_SECRET'],
- os.environ['ACCESS_KEY'],
- os.environ['ACCESS_SECRET']
- )
|
3d3862b0c7ea872e690999f46de88be287598758
|
lib/__init__.py
|
lib/__init__.py
|
import redis
import json
import time
class DHTStorage():
def __init__(self, key):
self.redis = redis.StrictRedis(host='localhost', port=6379, db=0)
self.key = key
def get_key(self, name):
return "%s:%s" % (self.key, name)
def send(self, name, data):
pushData = {'time': time.time(), 'value': data}
self.redis.lpush(self.get_key(name), json.dumps(pushData))
def add_humidity(self, value):
self.send('humidity', value)
def add_temperature(self, value):
self.send('temperature', value)
def get_data(self, name, start=0, end=200):
return self.redis.lrange(name, start, end)
def get_temperature(self, start=0, end=200):
return self.get_data(self.get_key('temperature'), start, end)
def get_humidity(self, start=0, end=200):
return self.get_data(self.get_key('humidity'), start, end)
|
import redis
import json
import time
class DHTStorage():
def __init__(self, key):
self.redis = redis.StrictRedis(host='localhost', port=6379, db=0)
self.key = key
def get_key(self, name):
return "%s:%s" % (self.key, name)
def send(self, name, data):
pushData = {'time': time.time(), 'value': data}
self.redis.lpush(self.get_key(name), json.dumps(pushData))
def add_humidity(self, value):
self.send('humidity', value)
self.remove_old('humidity')
def add_temperature(self, value):
self.send('temperature', value)
self.remove_old('temperature')
def get_data(self, name, start=0, end=200):
return self.redis.lrange(name, start, end)
def get_temperature(self, start=0, end=200):
return self.get_data(self.get_key('temperature'), start, end)
def get_humidity(self, start=0, end=200):
return self.get_data(self.get_key('humidity'), start, end)
def remove_old(self, name, len=100000):
self.redis.ltrim(self.get_key(name), 0, len)
|
Remove old entries (~2 months, keep 100000 entries)
|
Remove old entries (~2 months, keep 100000 entries)
|
Python
|
mit
|
Ajnasz/pippo,Ajnasz/pippo,Ajnasz/pippo
|
---
+++
@@ -16,9 +16,11 @@
def add_humidity(self, value):
self.send('humidity', value)
+ self.remove_old('humidity')
def add_temperature(self, value):
self.send('temperature', value)
+ self.remove_old('temperature')
def get_data(self, name, start=0, end=200):
return self.redis.lrange(name, start, end)
@@ -28,3 +30,6 @@
def get_humidity(self, start=0, end=200):
return self.get_data(self.get_key('humidity'), start, end)
+
+ def remove_old(self, name, len=100000):
+ self.redis.ltrim(self.get_key(name), 0, len)
|
e676f59b445157d1cc247ada74e0b7b1fc1afced
|
demos/burgers_sim.py
|
demos/burgers_sim.py
|
from phi.flow import *
domain = Domain([64, 64], boundaries=PERIODIC)
world.add(BurgersVelocity(domain, velocity=lambda s: math.randfreq(s) * 2), physics=Burgers())
show(App('Burgers Equation in %dD' % len(domain.resolution), framerate=5))
|
from phi.flow import *
domain = Domain([64, 64], boundaries=PERIODIC, box=box[0:100, 0:100])
world.add(BurgersVelocity(domain, velocity=Noise(channels=domain.rank) * 2), physics=Burgers())
show(App('Burgers Equation in %dD' % len(domain.resolution), framerate=5))
|
Use Noise in Burgers demo
|
Use Noise in Burgers demo
|
Python
|
mit
|
tum-pbs/PhiFlow,tum-pbs/PhiFlow
|
---
+++
@@ -1,6 +1,6 @@
from phi.flow import *
-domain = Domain([64, 64], boundaries=PERIODIC)
-world.add(BurgersVelocity(domain, velocity=lambda s: math.randfreq(s) * 2), physics=Burgers())
+domain = Domain([64, 64], boundaries=PERIODIC, box=box[0:100, 0:100])
+world.add(BurgersVelocity(domain, velocity=Noise(channels=domain.rank) * 2), physics=Burgers())
show(App('Burgers Equation in %dD' % len(domain.resolution), framerate=5))
|
d024177d3b060e6219074bf1500ebc6ae947ad1a
|
openassessment/fileupload/backends/__init__.py
|
openassessment/fileupload/backends/__init__.py
|
""" File Upload backends. """
from django.conf import settings
from . import django_storage, filesystem, s3, swift
def get_backend():
# .. setting_name: ORA2_FILEUPLOAD_BACKEND
# .. setting_default: s3
# .. setting_description: The backend used to upload the ora2 submissions attachments
# the supported values are: s3, filesystem, swift and django.
backend_setting = getattr(settings, "ORA2_FILEUPLOAD_BACKEND", "s3")
if backend_setting == "s3":
return s3.Backend()
elif backend_setting == "filesystem":
return filesystem.Backend()
elif backend_setting == "swift":
return swift.Backend()
elif backend_setting == "django":
return django_storage.Backend()
else:
raise ValueError("Invalid ORA2_FILEUPLOAD_BACKEND setting value: %s" % backend_setting)
|
""" File Upload backends. """
from django.conf import settings
from . import django_storage, filesystem, s3, swift
def get_backend():
# .. setting_name: ORA2_FILEUPLOAD_BACKEND
# .. setting_default: 's3'
# .. setting_description: The backend used to upload the ora2 submissions attachments.
# The supported values are: 's3', 'filesystem', 'swift' and 'django'.
backend_setting = getattr(settings, "ORA2_FILEUPLOAD_BACKEND", "s3")
if backend_setting == "s3":
return s3.Backend()
elif backend_setting == "filesystem":
return filesystem.Backend()
elif backend_setting == "swift":
return swift.Backend()
elif backend_setting == "django":
return django_storage.Backend()
else:
raise ValueError("Invalid ORA2_FILEUPLOAD_BACKEND setting value: %s" % backend_setting)
|
Fix annotation: The default value should be a string
|
Fix annotation: The default value should be a string
|
Python
|
agpl-3.0
|
edx/edx-ora2,edx/edx-ora2,EDUlib/edx-ora2,EDUlib/edx-ora2,edx/edx-ora2,EDUlib/edx-ora2,EDUlib/edx-ora2,edx/edx-ora2
|
---
+++
@@ -8,9 +8,9 @@
def get_backend():
# .. setting_name: ORA2_FILEUPLOAD_BACKEND
- # .. setting_default: s3
- # .. setting_description: The backend used to upload the ora2 submissions attachments
- # the supported values are: s3, filesystem, swift and django.
+ # .. setting_default: 's3'
+ # .. setting_description: The backend used to upload the ora2 submissions attachments.
+ # The supported values are: 's3', 'filesystem', 'swift' and 'django'.
backend_setting = getattr(settings, "ORA2_FILEUPLOAD_BACKEND", "s3")
if backend_setting == "s3":
return s3.Backend()
|
26b1d4f47c742f33c4ecdac68e88dbbc958e5756
|
tests/create_minimal_image_test.py
|
tests/create_minimal_image_test.py
|
from unittest import TestCase
import create_minimal_image
from create_minimal_image import main
POPEN_COMMAND_LIST = ""
class CreateMinimalImageTest(TestCase):
def setUp(self):
global POPEN_COMMAND_LIST
POPEN_COMMAND_LIST = ""
create_minimal_image._run_popen_command = stubbed_run_popen_command
def test_main_will_correctly_return_shared_objects_and_locations(self):
self.maxDiff = None
main("/usr/lib/jvm")
self.assertEquals(POPEN_COMMAND_LIST, get_expected_popen_comands())
def stubbed_run_popen_command(command):
global POPEN_COMMAND_LIST
POPEN_COMMAND_LIST += " ".join(command) + "\n"
try:
with open("tests/fixtures/{0}.txt".format("_".join(command).replace("/", "_")), "r") as f:
std_out = f.read()
return std_out
except:
return ""
def get_expected_popen_comands():
with open("tests/fixtures/expected_popen_commands.txt", "r") as f:
expected_popen_commands = f.read()
return expected_popen_commands
|
from unittest import TestCase
import create_minimal_image
from create_minimal_image import main
POPEN_COMMAND_LIST = []
class CreateMinimalImageTest(TestCase):
def setUp(self):
global POPEN_COMMAND_LIST
POPEN_COMMAND_LIST = []
create_minimal_image._run_popen_command = stubbed_run_popen_command
def test_main_will_correctly_return_shared_objects_and_locations(self):
self.maxDiff = None
main("/usr/lib/jvm")
self.assertEquals(POPEN_COMMAND_LIST, get_expected_popen_comands())
def stubbed_run_popen_command(command):
POPEN_COMMAND_LIST.append(" ".join(command))
try:
with open("tests/fixtures/{0}.txt".format("_".join(command).replace("/", "_")), "r") as f:
std_out = f.read()
return std_out
except:
return ""
def get_expected_popen_comands():
with open("tests/fixtures/expected_popen_commands.txt", "r") as f:
expected_popen_commands = f.read().split("\n")
return [command for command in expected_popen_commands if command != ""]
|
Revert "[TEST] refactor test to get it passing on Travis CI"
|
Revert "[TEST] refactor test to get it passing on Travis CI"
This reverts commit b92684d252e92a75115ce8617a15c107b5a34b09.
|
Python
|
mit
|
williamsbdev/minimal-docker-image-maker,williamsbdev/minimal-docker-image-maker
|
---
+++
@@ -3,14 +3,14 @@
import create_minimal_image
from create_minimal_image import main
-POPEN_COMMAND_LIST = ""
+POPEN_COMMAND_LIST = []
class CreateMinimalImageTest(TestCase):
def setUp(self):
global POPEN_COMMAND_LIST
- POPEN_COMMAND_LIST = ""
+ POPEN_COMMAND_LIST = []
create_minimal_image._run_popen_command = stubbed_run_popen_command
def test_main_will_correctly_return_shared_objects_and_locations(self):
@@ -20,8 +20,7 @@
def stubbed_run_popen_command(command):
- global POPEN_COMMAND_LIST
- POPEN_COMMAND_LIST += " ".join(command) + "\n"
+ POPEN_COMMAND_LIST.append(" ".join(command))
try:
with open("tests/fixtures/{0}.txt".format("_".join(command).replace("/", "_")), "r") as f:
std_out = f.read()
@@ -32,5 +31,5 @@
def get_expected_popen_comands():
with open("tests/fixtures/expected_popen_commands.txt", "r") as f:
- expected_popen_commands = f.read()
- return expected_popen_commands
+ expected_popen_commands = f.read().split("\n")
+ return [command for command in expected_popen_commands if command != ""]
|
0ddaed24e0f011ca1bb777af49936f64684a7d4c
|
bin/scripts/contig_length_filter.py
|
bin/scripts/contig_length_filter.py
|
#!/usr/bin/env python
import sys
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
if len(sys.argv) < 5:
print("Usage: %s <length threshold> <contigs_file> <suffix> <output>" % sys.argv[0])
sys.exit(1)
f_n = sys.argv[2]
suffix = sys.argv[3]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
output_handle = open(sys.argv[4], "w")
SeqIO.write((SeqRecord(record.seq, (record.name + "_" + suffix).replace(".", "_"), "","") for record in input_seq_iterator \
if len(record.seq) > int(sys.argv[1])), output_handle, "fasta")
output_handle.close()
|
#!/usr/bin/env python
import sys
from Bio import SeqIO
if len(sys.argv) < 4:
print("Usage: %s <length threshold> <contigs_file> <output>" % sys.argv[0])
sys.exit(1)
f_n = sys.argv[2]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
filtered_iterator = (record for record in input_seq_iterator \
if len(record.seq) > int(sys.argv[1]))
output_handle = open(sys.argv[3], "w")
SeqIO.write(filtered_iterator, output_handle, "fasta")
output_handle.close()
|
Revert "length filter script now adds provided suffix to contig names"
|
Revert "length filter script now adds provided suffix to contig names"
This reverts commit 4d3985f667465eb5564de4fada8820e23607a58b.
|
Python
|
mit
|
tanaes/snakemake_assemble,tanaes/snakemake_assemble,tanaes/snakemake_assemble
|
---
+++
@@ -1,17 +1,16 @@
#!/usr/bin/env python
import sys
from Bio import SeqIO
-from Bio.SeqRecord import SeqRecord
-if len(sys.argv) < 5:
- print("Usage: %s <length threshold> <contigs_file> <suffix> <output>" % sys.argv[0])
+if len(sys.argv) < 4:
+ print("Usage: %s <length threshold> <contigs_file> <output>" % sys.argv[0])
sys.exit(1)
f_n = sys.argv[2]
-suffix = sys.argv[3]
input_seq_iterator = SeqIO.parse(open(f_n, "r"), "fasta")
-
-output_handle = open(sys.argv[4], "w")
-SeqIO.write((SeqRecord(record.seq, (record.name + "_" + suffix).replace(".", "_"), "","") for record in input_seq_iterator \
- if len(record.seq) > int(sys.argv[1])), output_handle, "fasta")
+filtered_iterator = (record for record in input_seq_iterator \
+ if len(record.seq) > int(sys.argv[1]))
+
+output_handle = open(sys.argv[3], "w")
+SeqIO.write(filtered_iterator, output_handle, "fasta")
output_handle.close()
|
2dadeef44576ac5ecbb67b929c4190675c449c7f
|
devops/settings.py
|
devops/settings.py
|
DRIVER = 'devops.driver.libvirt.libvirt_driver'
DRIVER_PARAMETERS = {
'connection_string': 'qemu:///system',
}
INSTALLED_APPS = ['devops']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
'TEST_CHARSET': 'UTF8'
}
}
|
import os
DRIVER = 'devops.driver.libvirt.libvirt_driver'
DRIVER_PARAMETERS = {
'connection_string': os.environ.get('CONNECTION_STRING', 'qemu:///system'),
}
INSTALLED_APPS = ['devops']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
'TEST_CHARSET': 'UTF8'
}
}
|
Use environment variable for connection_string driver parameter
|
Use environment variable for connection_string driver parameter
|
Python
|
apache-2.0
|
stackforge/fuel-devops,stackforge/fuel-devops
|
---
+++
@@ -1,7 +1,9 @@
+import os
+
DRIVER = 'devops.driver.libvirt.libvirt_driver'
DRIVER_PARAMETERS = {
- 'connection_string': 'qemu:///system',
- }
+ 'connection_string': os.environ.get('CONNECTION_STRING', 'qemu:///system'),
+}
INSTALLED_APPS = ['devops']
|
a7c447db15a0b07043b8973ffa318aed858c5ded
|
targets/pythoncard/basesource.py
|
targets/pythoncard/basesource.py
|
"""The main form for the application"""
from PythonCard import model
# Allow importing of our custom controls
import PythonCard.resource
PythonCard.resource.APP_COMPONENTS_PACKAGE = "vb2py.targets.pythoncard.vbcontrols"
class Background(model.Background):
def __getattr__(self, name):
"""If a name was not found then look for it in components"""
return getattr(self.components, name)
def __init__(self, *args, **kw):
"""Initialize the form"""
model.Background.__init__(self, *args, **kw)
# Call the VB Form_Load
# TODO: This is brittle - depends on how the private indicator is set
if hasattr(self, "_Background__Form_Load"):
self._Background__Form_Load()
elif hasattr(self, "Form_Load"):
self.Form_Load()
# CODE_GOES_HERE
if __name__ == '__main__':
app = model.Application(Background)
app.MainLoop()
|
"""The main form for the application"""
from PythonCard import model
# Allow importing of our custom controls
import PythonCard.resource
PythonCard.resource.APP_COMPONENTS_PACKAGE = "vb2py.targets.pythoncard.vbcontrols"
class Background(model.Background):
def __getattr__(self, name):
"""If a name was not found then look for it in components"""
return getattr(self.components, name)
def __init__(self, *args, **kw):
"""Initialize the form"""
model.Background.__init__(self, *args, **kw)
# Call the VB Form_Load
# TODO: This is brittle - depends on how the private indicator is set
if hasattr(self, "_MAINFORM__Form_Load"):
self._MAINFORM__Form_Load()
elif hasattr(self, "Form_Load"):
self.Form_Load()
# CODE_GOES_HERE
if __name__ == '__main__':
app = model.Application(MAINFORM)
app.MainLoop()
|
Fix base form source to use correct class name
|
Fix base form source to use correct class name
|
Python
|
bsd-3-clause
|
mvz/vb2py,mvz/vb2py,mvz/vb2py
|
---
+++
@@ -18,8 +18,8 @@
model.Background.__init__(self, *args, **kw)
# Call the VB Form_Load
# TODO: This is brittle - depends on how the private indicator is set
- if hasattr(self, "_Background__Form_Load"):
- self._Background__Form_Load()
+ if hasattr(self, "_MAINFORM__Form_Load"):
+ self._MAINFORM__Form_Load()
elif hasattr(self, "Form_Load"):
self.Form_Load()
@@ -28,5 +28,5 @@
if __name__ == '__main__':
- app = model.Application(Background)
+ app = model.Application(MAINFORM)
app.MainLoop()
|
0cb41062401670a3e423b610d1f128657a9ce623
|
_tests/test_links.py
|
_tests/test_links.py
|
#!/usr/bin/env python
# -*- encoding: utf-8
import pytest
import requests
@pytest.mark.parametrize('path', [
# Check pagination is working correctly
'/page/2/', '/page/3/',
])
def test_pages_appear_correctly(path):
resp = requests.get(f'http://localhost:5757/{path}')
assert resp.status_code == 200
@pytest.mark.parametrize('path, text_in_page', [
('2017/', 'Posts from 2017'),
('2017/07/', 'Posts from July 2017'),
('', 'Older posts'),
('', '<title>alexwlchan</title>'),
])
def test_text_appears_in_pages(path, text_in_page):
resp = requests.get(f'http://localhost:5757/{path}')
assert resp.status_code == 200
assert text_in_page in resp.text
|
#!/usr/bin/env python
# -*- encoding: utf-8
import pytest
import requests
@pytest.mark.parametrize('path', [
# Check pagination is working correctly
'/page/2/', '/page/3/',
])
def test_pages_appear_correctly(path):
resp = requests.get(f'http://localhost:5757/{path}')
assert resp.status_code == 200
@pytest.mark.parametrize('path, text_in_page', [
('2017/', 'Posts from 2017'),
('2017/07/', 'Posts from July 2017'),
('', 'Older posts'),
('', '<title>alexwlchan</title>'),
('archive/', '<h3>2017</h3>'),
])
def test_text_appears_in_pages(path, text_in_page):
resp = requests.get(f'http://localhost:5757/{path}')
assert resp.status_code == 200
assert text_in_page in resp.text
@pytest.mark.parametrize('path, text', [
# Year markers only appear in the global archives, not year or month pages
('2017/', '<h3>2017</h3>'),
('2017/07/', '<h3>2017</h3>'),
])
def test_text_does_not_appear_in_pages(path, text):
resp = requests.get(f'http://localhost:5757/{path}')
assert resp.status_code == 200
assert text not in resp.text
|
Add a couple of tests for the formatting pieces
|
Add a couple of tests for the formatting pieces
|
Python
|
mit
|
alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net,alexwlchan/alexwlchan.net
|
---
+++
@@ -19,8 +19,20 @@
('2017/07/', 'Posts from July 2017'),
('', 'Older posts'),
('', '<title>alexwlchan</title>'),
+ ('archive/', '<h3>2017</h3>'),
])
def test_text_appears_in_pages(path, text_in_page):
resp = requests.get(f'http://localhost:5757/{path}')
assert resp.status_code == 200
assert text_in_page in resp.text
+
+
+@pytest.mark.parametrize('path, text', [
+ # Year markers only appear in the global archives, not year or month pages
+ ('2017/', '<h3>2017</h3>'),
+ ('2017/07/', '<h3>2017</h3>'),
+])
+def test_text_does_not_appear_in_pages(path, text):
+ resp = requests.get(f'http://localhost:5757/{path}')
+ assert resp.status_code == 200
+ assert text not in resp.text
|
8362216a009763d4bf70c55819a74cc98c8e9ffe
|
_pytest/test_server.py
|
_pytest/test_server.py
|
from slackclient._user import User
from slackclient._server import Server, SlackLoginError
from slackclient._channel import Channel
import json
import pytest
@pytest.fixture
def login_data():
login_data = open('_pytest/data/rtm.start.json','r').read()
login_data = json.loads(login_data)
return login_data
def test_Server(server):
assert type(server) == Server
def test_Server_parse_channel_data(server, login_data):
server.parse_channel_data(login_data["channels"])
assert type(server.channels.find('general')) == Channel
def test_Server_parse_user_data(server, login_data):
server.parse_user_data(login_data["users"])
assert type(server.users.find('fakeuser')) == User
def test_Server_cantconnect(server):
with pytest.raises(SlackLoginError):
reply = server.ping()
@pytest.mark.xfail
def test_Server_ping(server, monkeypatch):
#monkeypatch.setattr("", lambda: True)
monkeypatch.setattr("websocket.create_connection", lambda: True)
reply = server.ping()
|
from slackclient._user import User
from slackclient._server import Server, SlackLoginError
from slackclient._channel import Channel
import json
import pytest
@pytest.fixture
def login_data():
login_data = open('_pytest/data/rtm.start.json', 'r').read()
login_data = json.loads(login_data)
return login_data
def test_Server(server):
assert type(server) == Server
def test_Server_parse_channel_data(server, login_data):
server.parse_channel_data(login_data["channels"])
assert type(server.channels.find('general')) == Channel
def test_Server_parse_user_data(server, login_data):
server.parse_user_data(login_data["users"])
assert type(server.users.find('fakeuser')) == User
def test_Server_cantconnect(server):
with pytest.raises(SlackLoginError):
reply = server.ping()
@pytest.mark.xfail
def test_Server_ping(server, monkeypatch):
#monkeypatch.setattr("", lambda: True)
monkeypatch.setattr("websocket.create_connection", lambda: True)
reply = server.ping()
|
Fix PEP8 white spacing (space after comma)
|
Fix PEP8 white spacing (space after comma)
|
Python
|
mit
|
slackhq/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient
|
---
+++
@@ -7,7 +7,7 @@
@pytest.fixture
def login_data():
- login_data = open('_pytest/data/rtm.start.json','r').read()
+ login_data = open('_pytest/data/rtm.start.json', 'r').read()
login_data = json.loads(login_data)
return login_data
|
21af3dbed471c9f6c860db4d2ae84d1e0fed4077
|
demo/option_example.py
|
demo/option_example.py
|
from sparts.tasks.periodic import PeriodicTask
from sparts.vservice import VService
from sparts.sparts import option
import socket
class HostCheckTask(PeriodicTask):
INTERVAL=5
check_name = option(default=socket.getfqdn(), type=str,
help='Name to check [%(default)s]')
def execute(self, *args, **kwargs):
self.logger.info("LOOKUP %s => %s", self.check_name,
socket.gethostbyname(self.check_name))
class DNSChecker(VService):
TASKS=[HostCheckTask]
if __name__ == '__main__':
DNSChecker.initFromCLI()
|
from sparts.tasks.periodic import PeriodicTask
from sparts.vservice import VService
from sparts.sparts import option, samples, SampleType
import socket
class HostCheckTask(PeriodicTask):
INTERVAL=5
check_name = option(default=socket.getfqdn(), type=str,
help='Name to check [%(default)s]')
def execute(self, *args, **kwargs):
self.logger.info("LOOKUP %s => %s", self.check_name,
socket.gethostbyname(self.check_name))
class PrintCountersTask(PeriodicTask):
INTERVAL=6
execute_duration = samples(windows=[60],
types=[SampleType.MAX, SampleType.MIN])
def execute(self, *args, **kwargs):
hostcheck = self.service.requireTask(HostCheckTask)
self.logger.info("hostcheck.duration :: %s",
hostcheck.execute_duration.getCounters())
self.logger.info("this.duration :: %s",
self.execute_duration.getCounters())
class DNSChecker(VService):
TASKS=[HostCheckTask, PrintCountersTask]
if __name__ == '__main__':
DNSChecker.initFromCLI()
|
Update option example to highlight samples as well
|
Update option example to highlight samples as well
And overriding samples
|
Python
|
bsd-3-clause
|
facebook/sparts,fmoo/sparts,bboozzoo/sparts,djipko/sparts,pshuff/sparts,pshuff/sparts,fmoo/sparts,facebook/sparts,djipko/sparts,bboozzoo/sparts
|
---
+++
@@ -1,6 +1,6 @@
from sparts.tasks.periodic import PeriodicTask
from sparts.vservice import VService
-from sparts.sparts import option
+from sparts.sparts import option, samples, SampleType
import socket
class HostCheckTask(PeriodicTask):
@@ -11,9 +11,21 @@
self.logger.info("LOOKUP %s => %s", self.check_name,
socket.gethostbyname(self.check_name))
+class PrintCountersTask(PeriodicTask):
+ INTERVAL=6
+ execute_duration = samples(windows=[60],
+ types=[SampleType.MAX, SampleType.MIN])
+ def execute(self, *args, **kwargs):
+ hostcheck = self.service.requireTask(HostCheckTask)
+ self.logger.info("hostcheck.duration :: %s",
+ hostcheck.execute_duration.getCounters())
+ self.logger.info("this.duration :: %s",
+ self.execute_duration.getCounters())
+
+
class DNSChecker(VService):
- TASKS=[HostCheckTask]
+ TASKS=[HostCheckTask, PrintCountersTask]
if __name__ == '__main__':
|
1661174b80e00ff04a2df245abf73b92825ec01a
|
libs/qr_tools.py
|
libs/qr_tools.py
|
#!/usr/bin/python3
import pyqrcode # sudo pip install pyqrcode
def getQRArray(text, errorCorrection):
""" Takes in text and errorCorrection (letter), returns 2D array of the QR code"""
# White is True (1)
# Black is False (0)
# ECC: L7, M15, Q25, H30
# Create the object
qr = pyqrcode.create(text, error=errorCorrection)
# Get the terminal representation and split by lines (get rid of top and bottom white spaces)
plainOut = qr.terminal().split("\n")[5:-5]
print(qr.terminal())
# Initialize the output 2D list
out = []
for line in plainOut:
thisOut = []
for char in line:
if char == u'7':
# This is white
thisOut.append(1)
elif char == u'4':
# This is black, it's part of the u'49'
thisOut.append(0)
# Finally add everything to the output, stipping whitespaces at start and end
out.append(thisOut[4:-4])
# Everything is done, return the qr code list
return out
|
#!/usr/bin/python3
import pyqrcode # sudo pip install pyqrcode
def getQRArray(text, errorCorrection):
""" Takes in text and errorCorrection (letter), returns 2D array of the QR code"""
# White is True (1)
# Black is False (0)
# ECC: L7, M15, Q25, H30
# Create the object
qr = pyqrcode.create(text, error=errorCorrection)
# Get the terminal representation and split by lines (get rid of top and bottom white spaces)
plainOut = qr.terminal().split("\n")[5:-5]
# Initialize the output 2D list
out = []
for line in plainOut:
thisOut = []
for char in line:
if char == u'7':
# This is white
thisOut.append(1)
elif char == u'4':
# This is black, it's part of the u'49'
thisOut.append(0)
# Finally add everything to the output, stipping whitespaces at start and end
out.append(thisOut[4:-4])
# Everything is done, return the qr code list
return out
|
Remove print of terminal output for debugging
|
Remove print of terminal output for debugging
|
Python
|
mit
|
btcspry/3d-wallet-generator
|
---
+++
@@ -13,8 +13,6 @@
# Get the terminal representation and split by lines (get rid of top and bottom white spaces)
plainOut = qr.terminal().split("\n")[5:-5]
-
- print(qr.terminal())
# Initialize the output 2D list
out = []
|
7bf6d13389391c1f3cbea7873cf12f345546d789
|
tools/send-echo-to-worker.py
|
tools/send-echo-to-worker.py
|
#!/usr/bin/env python
# Copyright 2014 - Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Developer utility: send an "echo" RPC to a Solum worker to verify the
# messaging path works.  Usage: send-echo-to-worker.py <message words...>

import sys

from oslo.config import cfg

from solum.common import context
from solum.openstack.common import log as logging
from solum.worker import api

LOG = logging.getLogger(__name__)

if __name__ == '__main__':
    # Load the standard Solum configuration so the RPC transport is set up
    # the same way as the running services.
    conf_files = ['--config-file=/etc/solum/solum.conf']
    cfg.CONF(conf_files, project='solum')
    # Everything after the script name becomes the echoed message.
    message = ' '.join(sys.argv[1:])
    api.API(context=context.RequestContext()).echo(message)
|
#!/usr/bin/env python
# Copyright 2014 - Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Developer utility: send an "echo" RPC to a Solum worker to verify the
# messaging path works.  Usage: send-echo-to-worker.py <message words...>

import sys

from oslo_config import cfg

from solum.common import context
from solum.openstack.common import log as logging
from solum.worker import api

LOG = logging.getLogger(__name__)

if __name__ == '__main__':
    # Load the standard Solum configuration so the RPC transport is set up
    # the same way as the running services.
    conf_files = ['--config-file=/etc/solum/solum.conf']
    cfg.CONF(conf_files, project='solum')
    # Everything after the script name becomes the echoed message.
    message = ' '.join(sys.argv[1:])
    api.API(context=context.RequestContext()).echo(message)
|
Fix the accurate file to prevent import errors
|
Fix the accurate file to prevent import errors
Change-Id: I0e4d246518bbdd9745d61a06d6960b89cf9ed611
|
Python
|
apache-2.0
|
devdattakulkarni/test-solum,stackforge/solum,stackforge/solum,openstack/solum,openstack/solum,devdattakulkarni/test-solum
|
---
+++
@@ -15,7 +15,7 @@
import sys
-from oslo.config import cfg
+from oslo_config import cfg
from solum.common import context
from solum.openstack.common import log as logging
|
898bd274329bf0f279565091f6ceee9073640bf6
|
deactivate.py
|
deactivate.py
|
from activate import deactivate

"""
Script to deactivate LUFA for Arduino.
More info can be found in the activate.py script.
"""

if __name__ == '__main__':
    # All of the logic lives in activate.py; this file is just the
    # command-line entry point for the reverse operation.
    deactivate()
|
from activate import deactivate

"""
Script to deactivate LUFA for Arduino.
More info can be found in the activate.py script.
"""

if __name__ == '__main__':
    # All of the logic lives in activate.py; this file is just the
    # command-line entry point for the reverse operation.
    deactivate()
|
Add final newline to make pylint happy
|
Add final newline to make pylint happy
|
Python
|
mit
|
Palatis/Arduino-Lufa,Palatis/Arduino-Lufa,Palatis/Arduino-Lufa
| |
d4b3e03c91428ae4de2729991d2815153ec7ccb8
|
cronos/accounts/models.py
|
cronos/accounts/models.py
|
from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
    # Per-student profile attached one-to-one to the Django auth user.
    user = models.ForeignKey(User, unique = True)
    # Credentials for the "Dionysos" student portal.
    dionysos_username = models.CharField(max_length = 15, unique = True)
    dionysos_password = models.CharField(max_length = 30)
    # Optional e-class credentials and the lesson list associated with them.
    eclass_username = models.CharField(max_length = 30, null = True, blank = True)
    eclass_password = models.CharField(max_length = 30, null = True, blank = True)
    eclass_lessons = models.TextField(null = True, blank = True)
    # Academic identity fields (stored as short character codes).
    introduction_year = models.CharField(max_length = 5)
    registration_number = models.CharField(max_length = 8)
    school = models.CharField(max_length = 5)
    semester = models.CharField(max_length = 2)
    # Optional webmail credentials.
    webmail_username = models.CharField(max_length = 30, null = True, blank = True)
    webmail_password = models.CharField(max_length = 30, null = True, blank = True)
    # Free-form cached content per student.
    teacher_announcements = models.TextField(null = True, blank = True)
    other_announcements = models.TextField(null = True, blank = True)
    declaration = models.TextField(null = True, blank = True)
    grades = models.TextField(null = True, blank = True)

    def __str__(self):
        # Show the linked auth username wherever the profile is displayed.
        return self.user.username
|
from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
    # Per-student profile attached one-to-one to the Django auth user.
    user = models.ForeignKey(User, unique = True)
    # Credentials for the "Dionysos" student portal.
    dionysos_username = models.CharField(max_length = 15, unique = True)
    dionysos_password = models.CharField(max_length = 30)
    # Optional e-class credentials and the lesson list associated with them.
    eclass_username = models.CharField(max_length = 30, null = True, blank = True)
    eclass_password = models.CharField(max_length = 30, null = True, blank = True)
    eclass_lessons = models.TextField(null = True, blank = True)
    # Academic identity fields (stored as short character codes).
    introduction_year = models.CharField(max_length = 5)
    registration_number = models.CharField(max_length = 8)
    school = models.CharField(max_length = 5)
    semester = models.CharField(max_length = 2)
    # Optional webmail credentials.
    webmail_username = models.CharField(max_length = 30, null = True, blank = True)
    webmail_password = models.CharField(max_length = 30, null = True, blank = True)
    # Free-form cached content per student.
    teacher_announcements = models.TextField(null = True, blank = True)
    other_announcements = models.TextField(null = True, blank = True)
    declaration = models.TextField(null = True, blank = True)
    grades = models.TextField(null = True, blank = True)

    def __unicode__(self):
        # Show the linked auth username wherever the profile is displayed
        # (Python 2 Django uses __unicode__ for this).
        return self.user.username
|
Switch student model to unicode
|
Switch student model to unicode
|
Python
|
agpl-3.0
|
LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr
|
---
+++
@@ -19,5 +19,5 @@
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
- def __str__(self):
+ def __unicode__(self):
return self.user.username
|
b0edec6bc9a4d77a1f0ea0f803ea892f35cc2f4f
|
text_field.py
|
text_field.py
|
# Created On: 2012/01/23
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
class TextField:
    """Two-way glue between a text model and an editable Qt text widget.

    Edits committed in the view are pushed to the model; refresh() pushes
    the model's text back into the view.  Assumes the view exposes an
    editingFinished signal (e.g. QLineEdit) -- a view without one (such
    as QLabel) would fail in __init__.
    """
    def __init__(self, model, view):
        self.model = model
        self.view = view
        # Register ourselves as the model's view so it can call refresh().
        self.model.view = self
        self.view.editingFinished.connect(self.editingFinished)

    def editingFinished(self):
        # view --> model
        self.model.text = self.view.text()

    # model --> view
    def refresh(self):
        self.view.setText(self.model.text)
|
# Created On: 2012/01/23
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
class TextField:
    """Two-way glue between a text model and a Qt text widget.

    Edits committed in the view are pushed to the model; refresh() pushes
    the model's text back into the view.  Views without an editingFinished
    signal (e.g. QLabel) are supported read-only.
    """
    def __init__(self, model, view):
        self.model = model
        self.view = view
        # Register ourselves as the model's view so it can call refresh().
        self.model.view = self
        # QLabel doesn't allow editing and has no editingFinished signal,
        # so only hook it up when the view actually provides one.
        editing_done = getattr(self.view, 'editingFinished', None)
        if editing_done is not None:
            editing_done.connect(self.editingFinished)

    def editingFinished(self):
        # view --> model
        self.model.text = self.view.text()

    def refresh(self):
        # model --> view
        self.view.setText(self.model.text)
|
Make TextField also work with a QLabel view, which doesn't allow editing.
|
Make TextField also work with a QLabel view, which doesn't allow editing.
|
Python
|
bsd-3-clause
|
hsoft/qtlib
|
---
+++
@@ -10,7 +10,9 @@
self.model = model
self.view = view
self.model.view = self
- self.view.editingFinished.connect(self.editingFinished)
+ # Make TextField also work for QLabel, which doesn't allow editing
+ if hasattr(self.view, 'editingFinished'):
+ self.view.editingFinished.connect(self.editingFinished)
def editingFinished(self):
self.model.text = self.view.text()
|
b801df9acdc13460ecc5d36bcb6bd300f5de16c3
|
flatten-array/flatten_array.py
|
flatten-array/flatten_array.py
|
def flatten(lst):
    """Completely flatten an arbitrarily-deep list"""
    return [*_flatten(lst)]


def _flatten(lst):
    """Generator for flattening arbitrarily-deep lists"""
    if isinstance(lst, (list, tuple)):
        for item in lst:
            # None entries are dropped entirely rather than recursed into.
            if item is None:
                continue
            else:
                yield from _flatten(item)
    else:
        # Non-sequence leaf: emit it as-is.
        yield lst
|
def flatten(lst):
    """Return a flat list of every non-None leaf of *lst*.

    Nesting may be arbitrarily deep; both lists and tuples are expanded.
    """
    return list(_flatten(lst))


def _flatten(seq):
    """Yield the leaves of *seq*, descending into lists/tuples, skipping None."""
    for element in seq:
        if isinstance(element, (list, tuple)):
            # Recurse into nested sequences and re-emit their leaves.
            for leaf in _flatten(element):
                yield leaf
        elif element is not None:
            yield element
|
Tidy and simplify generator code
|
Tidy and simplify generator code
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
---
+++
@@ -5,11 +5,8 @@
def _flatten(lst):
"""Generator for flattening arbitrarily-deep lists"""
- if isinstance(lst, (list, tuple)):
- for item in lst:
- if item is None:
- continue
- else:
- yield from _flatten(item)
- else:
- yield lst
+ for item in lst:
+ if isinstance(item, (list, tuple)):
+ yield from _flatten(item)
+ elif item is not None:
+ yield item
|
58cbb8b3dbe8d1275743c3fd5d043cfa12914cb3
|
data_structures/bitorrent/client.py
|
data_structures/bitorrent/client.py
|
from urlparse import urlparse
from torrent import Torrent
from trackers.udp import UDPTracker
class Client(object):
    # Registry of torrents known to the client, keyed by whatever value is
    # assigned through the `torrents` setter.  NOTE(review): this is a
    # class attribute, so it is shared by all Client instances.
    __TORRENTS = {}

    @property
    def torrents(self):
        return self.__TORRENTS

    @torrents.setter
    def torrents(self, new_torrent):
        # Assignment registers an additional torrent (parsed by Torrent)
        # rather than replacing the whole mapping.
        self.__TORRENTS[new_torrent] = Torrent(new_torrent)

    def download(self, torrent):
        if not torrent in self.__TORRENTS:
            raise ValueError('%s not here' % torrent)
        torrent = self.__TORRENTS[torrent]

        # Query every announce URL; only udp:// trackers are handled here.
        for url in torrent.urls:
            parsed = urlparse(url)
            if parsed.scheme == 'udp':
                # Split 'udp://host:port'; url[2:] strips the leading '//'.
                _, url, port = url.split(":")
                tracker = UDPTracker(url[2:], int(port), torrent)
                print tracker.peers
|
import urllib
from random import randint
from urlparse import urlparse
from torrent import Torrent
from trackers.udp import UDPTracker
class Client(object):
    # Registry of torrents known to the client, keyed by whatever value is
    # assigned through the `torrents` setter.  NOTE(review): this is a
    # class attribute, so it is shared by all Client instances.
    __TORRENTS = {}

    def __init__(self):
        # Peer id of the form '-AZ2470-' plus 12 random digits, URL-quoted
        # so it can be embedded directly in tracker requests.
        self.peer_id = urllib.quote("-AZ2470-" + "".join([str(randint(0, 9)) for i in xrange(12)]))

    @property
    def torrents(self):
        return self.__TORRENTS

    @torrents.setter
    def torrents(self, new_torrent):
        # Assignment registers an additional torrent (parsed by Torrent)
        # rather than replacing the whole mapping.
        self.__TORRENTS[new_torrent] = Torrent(new_torrent)

    def _get_peers(self, torrent):
        """Collect {ip: port} peers from all of the torrent's udp trackers."""
        peers = {}

        for url in torrent.urls:
            parsed = urlparse(url)
            if parsed.scheme == 'udp':
                # Split 'udp://host:port'; url[2:] strips the leading '//'.
                _, url, port = url.split(":")
                tracker = UDPTracker(url[2:], int(port), torrent, self.peer_id)

                peers.update({ip: port for ip, port in tracker.peers})

        return peers

    def download(self, torrent):
        if not torrent in self.__TORRENTS:
            raise ValueError('%s not here' % torrent)
        torrent = self.__TORRENTS[torrent]

        peers = self._get_peers(torrent)
        print peers
|
Use a separate method to get all peers of a torrent
|
Use a separate method to get all peers of a torrent
|
Python
|
apache-2.0
|
vtemian/university_projects,vtemian/university_projects,vtemian/university_projects
|
---
+++
@@ -1,3 +1,5 @@
+import urllib
+from random import randint
from urlparse import urlparse
from torrent import Torrent
@@ -7,6 +9,9 @@
class Client(object):
__TORRENTS = {}
+ def __init__(self):
+ self.peer_id = urllib.quote("-AZ2470-" + "".join([str(randint(0, 9)) for i in xrange(12)]))
+
@property
def torrents(self):
return self.__TORRENTS
@@ -15,14 +20,23 @@
def torrents(self, new_torrent):
self.__TORRENTS[new_torrent] = Torrent(new_torrent)
+ def _get_peers(self, torrent):
+ peers = {}
+
+ for url in torrent.urls:
+ parsed = urlparse(url)
+ if parsed.scheme == 'udp':
+ _, url, port = url.split(":")
+ tracker = UDPTracker(url[2:], int(port), torrent, self.peer_id)
+
+ peers.update({ip: port for ip, port in tracker.peers})
+
+ return peers
+
def download(self, torrent):
if not torrent in self.__TORRENTS:
raise ValueError('%s not here' % torrent)
torrent = self.__TORRENTS[torrent]
- for url in torrent.urls:
- parsed = urlparse(url)
- if parsed.scheme == 'udp':
- _, url, port = url.split(":")
- tracker = UDPTracker(url[2:], int(port), torrent)
- print tracker.peers
+ peers = self._get_peers(torrent)
+ print peers
|
848723c943cfb8995c6f2a68ea19b203c75e4aaa
|
tests/test_scan.py
|
tests/test_scan.py
|
#!/usr/bin/env python
# coding=utf-8
try:
import unittest.mock as mock
except ImportError:
import mock
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from nessusapi.scan import Scan
from nessusapi.session import Session
class SessionTestCase(unittest.TestCase):
    """Tests for Scan construction against a mocked Session."""

    def test_init(self):
        # The mocked session returns a canned scan/new response; Scan
        # should extract the uuid from it and issue the request with the
        # parameters it was given.
        fake_session = mock.MagicMock(Session)
        fake_session.request.return_value = {'uuid': 'e3b4f63f-de03-ec8b'}

        scan = Scan('192.0.2.9', 'TestScan', '5', fake_session)
        self.assertEqual(scan.uuid, 'e3b4f63f-de03-ec8b')
        fake_session.request.assert_called_with('scan/new',
                                                target='192.0.2.9',
                                                scan_name='TestScan',
                                                policy_id='5')


if __name__ == '__main__':
    unittest.main()
|
# coding=utf-8
try:
import unittest.mock as mock
except ImportError:
import mock
import unittest
import nessusapi.scan
class TestScan(unittest.TestCase):
    """Tests for Scan construction against a mocked nessus object."""

    def test_init(self):
        # request_single returns the uuid directly; Scan should store it
        # and pass our parameters (plus the 'scan'/'uuid' selectors)
        # through to the API call.
        fake_nessus = mock.Mock(request_single=
                                mock.Mock(return_value='e3b4f63f-de03-ec8b'))

        scan = nessusapi.scan.Scan(fake_nessus,'192.0.2.9', 'TestScan', 5)
        self.assertEqual(scan.uuid, 'e3b4f63f-de03-ec8b')
        fake_nessus.request_single.assert_called_with('scan/new',
                                                      'scan', 'uuid',
                                                      target='192.0.2.9',
                                                      scan_name='TestScan',
                                                      policy_id=5)


if __name__ == '__main__':
    unittest.main()
|
Update test scan to work for new model
|
Update test scan to work for new model
|
Python
|
mit
|
sait-berkeley-infosec/pynessus-api
|
---
+++
@@ -1,29 +1,25 @@
-#!/usr/bin/env python
# coding=utf-8
try:
import unittest.mock as mock
except ImportError:
import mock
+
import unittest
-try:
- from StringIO import StringIO
-except ImportError:
- from io import StringIO
+import nessusapi.scan
-from nessusapi.scan import Scan
-from nessusapi.session import Session
+class TestScan(unittest.TestCase):
+ def test_init(self):
+ fake_nessus = mock.Mock(request_single=
+ mock.Mock(return_value='e3b4f63f-de03-ec8b'))
-class SessionTestCase(unittest.TestCase):
- def test_init(self):
- fake_session = mock.MagicMock(Session)
- fake_session.request.return_value = {'uuid': 'e3b4f63f-de03-ec8b'}
- scan = Scan('192.0.2.9', 'TestScan', '5', fake_session)
+ scan = nessusapi.scan.Scan(fake_nessus,'192.0.2.9', 'TestScan', 5)
self.assertEqual(scan.uuid, 'e3b4f63f-de03-ec8b')
- fake_session.request.assert_called_with('scan/new',
+ fake_nessus.request_single.assert_called_with('scan/new',
+ 'scan', 'uuid',
target='192.0.2.9',
scan_name='TestScan',
- policy_id='5')
+ policy_id=5)
if __name__ == '__main__':
unittest.main()
|
1030381f6a22d38fa48222f44858a8396970494e
|
nucleus/urls.py
|
nucleus/urls.py
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.http import HttpResponse
from watchman import views as watchman_views
admin.autodiscover()  # Discover admin.py files for the admin interface.

urlpatterns = [
    # Base app owns the root URLs.
    url(r'', include('nucleus.base.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^api-token-auth/',
        'rest_framework.authtoken.views.obtain_auth_token'),
    url(r'^rna/', include('rna.urls')),
    # robots.txt is generated inline; crawling is permitted only when
    # ENGAGE_ROBOTS is set.
    url(r'^robots\.txt$', lambda r: HttpResponse(
        "User-agent: *\n%s: /" % ('Allow' if settings.ENGAGE_ROBOTS else 'Disallow'),
        content_type="text/plain")),
    # Liveness/readiness endpoints backed by watchman.
    url(r'^healthz/$', watchman_views.ping, name="watchman.ping"),
    url(r'^readiness/$', watchman_views.status, name="watchman.status"),
]

if settings.OIDC_ENABLE:
    # OpenID Connect login routes are only mounted when enabled in settings.
    urlpatterns.append(url(r'^oidc/', include('mozilla_django_oidc.urls')))
|
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.http import HttpResponse
from watchman import views as watchman_views
admin.autodiscover()  # Discover admin.py files for the admin interface.

# Branding for the Django admin pages.
admin.site.site_header = 'Release Notes Administration'
admin.site.site_title = 'Release Notes Administration'

urlpatterns = [
    # Base app owns the root URLs.
    url(r'', include('nucleus.base.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^api-token-auth/',
        'rest_framework.authtoken.views.obtain_auth_token'),
    url(r'^rna/', include('rna.urls')),
    # robots.txt is generated inline; crawling is permitted only when
    # ENGAGE_ROBOTS is set.
    url(r'^robots\.txt$', lambda r: HttpResponse(
        "User-agent: *\n%s: /" % ('Allow' if settings.ENGAGE_ROBOTS else 'Disallow'),
        content_type="text/plain")),
    # Liveness/readiness endpoints backed by watchman.
    url(r'^healthz/$', watchman_views.ping, name="watchman.ping"),
    url(r'^readiness/$', watchman_views.status, name="watchman.status"),
]

if settings.OIDC_ENABLE:
    # OpenID Connect login routes are only mounted when enabled in settings.
    urlpatterns.append(url(r'^oidc/', include('mozilla_django_oidc.urls')))
|
Customize admin site title and header
|
Customize admin site title and header
|
Python
|
mpl-2.0
|
mozilla/nucleus,mozilla/nucleus,mozilla/nucleus,mozilla/nucleus
|
---
+++
@@ -7,6 +7,8 @@
admin.autodiscover() # Discover admin.py files for the admin interface.
+admin.site.site_header = 'Release Notes Administration'
+admin.site.site_title = 'Release Notes Administration'
urlpatterns = [
url(r'', include('nucleus.base.urls')),
|
d017ca19b6d810387424e388656d5ff63244a1f7
|
tests/engine/file_loader_test.py
|
tests/engine/file_loader_test.py
|
import unittest
from engine import file_loader
class FileLoaderTest(unittest.TestCase):
    """Smoke tests for the JSON data-file loading helpers."""

    def test_load_units(self):
        # The 'units' data file must parse to a non-empty list of dicts.
        dicts = file_loader.read_and_parse_json('units')
        self.assertIsInstance(dicts, list)
        self.assertGreater(len(dicts), 0)
        for dict_ in dicts:
            self.assertIsInstance(dict_, dict)


if __name__ == '__main__':
    unittest.main()
|
import unittest
from engine import file_loader
class FileLoaderTest(unittest.TestCase):
    """Smoke tests for the JSON data-file loading helpers."""

    def test_load_units(self):
        # The 'units' data file must parse to a non-empty list of dicts.
        dicts = file_loader.read_and_parse_json('units')
        self.assertIsInstance(dicts, list)
        self.assertGreater(len(dicts), 0)
        for dict_ in dicts:
            self.assertIsInstance(dict_, dict)

    def testLoadStruct(self):
        # load_struct maps each unit name to its argument dict.
        unit_map = file_loader.load_struct('units')
        for unit_name, unit_args in unit_map.items():
            self.assertIsInstance(unit_name, str)
            self.assertIsInstance(unit_args, dict)

    def testLoadEnum(self):
        # load_enum maps each name to an integer enum value.
        unit_map = file_loader.load_enum('attack_types')
        self.assertIsInstance(unit_map, dict)
        for unit_name, unit_enum in unit_map.items():
            self.assertIsInstance(unit_name, str)
            self.assertIsInstance(unit_enum, int)


if __name__ == '__main__':
    unittest.main()
|
Include tests for file loading helpers
|
Include tests for file loading helpers
|
Python
|
mit
|
Tactique/game_engine,Tactique/game_engine
|
---
+++
@@ -11,5 +11,18 @@
for dict_ in dicts:
self.assertIsInstance(dict_, dict)
+ def testLoadStruct(self):
+ unit_map = file_loader.load_struct('units')
+ for unit_name, unit_args in unit_map.items():
+ self.assertIsInstance(unit_name, str)
+ self.assertIsInstance(unit_args, dict)
+
+ def testLoadEnum(self):
+ unit_map = file_loader.load_enum('attack_types')
+ self.assertIsInstance(unit_map, dict)
+ for unit_name, unit_enum in unit_map.items():
+ self.assertIsInstance(unit_name, str)
+ self.assertIsInstance(unit_enum, int)
+
if __name__ == '__main__':
unittest.main()
|
4f6400e9ecf9bbc1cee62567673c619f9a975f95
|
lib/python/opendiamond/bundle.py
|
lib/python/opendiamond/bundle.py
|
#
# The OpenDiamond Platform for Interactive Search
# Version 5
#
# Copyright (c) 2011 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
import os
import subprocess
import zipfile
def make_zipfile(path, manifest, files):
    '''Create a DEFLATE-compressed zip bundle at *path*.

    manifest is a string written as 'opendiamond-manifest.txt'; files is a
    dict of archive-name => source-path pairs.  Raises Exception if *path*
    already exists, to avoid clobbering a previous bundle.
    '''
    if os.path.exists(path):
        raise Exception("Refusing to clobber destination file")
    # Avoid shadowing the builtin zip() and the 'path' parameter, and make
    # sure the archive is closed even if adding a member fails.
    bundle = zipfile.ZipFile(path, mode = 'w',
                             compression = zipfile.ZIP_DEFLATED)
    try:
        bundle.writestr('opendiamond-manifest.txt', manifest)
        for name, source in files.items():
            bundle.write(source, name)
    finally:
        bundle.close()
def bundle_python(out, filter, blob = None):
    '''Bundle a Python filter program into a zip archive at *out*.

    The filter is executed with --get-manifest and its stdout becomes the
    manifest text.  *blob*, when given, is added to the archive under the
    name 'blob'.  Raises Exception if the filter cannot be executed or
    exits non-zero.
    '''
    try:
        proc = subprocess.Popen(['python', os.path.realpath(filter),
                '--get-manifest'], stdout = subprocess.PIPE)
    except OSError:
        raise Exception("Couldn't execute filter program")
    manifest = proc.communicate()[0]
    if proc.returncode != 0:
        raise Exception("Couldn't generate filter manifest")
    files = {'filter': filter}
    if blob is not None:
        files['blob'] = blob
    make_zipfile(out, manifest, files)
|
#
# The OpenDiamond Platform for Interactive Search
# Version 5
#
# Copyright (c) 2011 Carnegie Mellon University
# All rights reserved.
#
# This software is distributed under the terms of the Eclipse Public
# License, Version 1.0 which can be found in the file named LICENSE.
# ANY USE, REPRODUCTION OR DISTRIBUTION OF THIS SOFTWARE CONSTITUTES
# RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT
#
import os
import subprocess
import zipfile
def make_zipfile(path, manifest, files):
    '''Create a DEFLATE-compressed zip bundle at *path*, overwriting any
    existing file.

    manifest is a string written as 'opendiamond-manifest.txt'; files is a
    dict of archive-name => source-path pairs.
    '''
    archive = zipfile.ZipFile(path, mode = 'w',
                              compression = zipfile.ZIP_DEFLATED)
    archive.writestr('opendiamond-manifest.txt', manifest)
    for arcname, source in files.items():
        archive.write(source, arcname)
    archive.close()
def bundle_python(out, filter, blob = None):
    '''Bundle a Python filter program into a zip archive at *out*.

    The filter is executed with --get-manifest and its stdout becomes the
    manifest text.  *blob*, when given, is added to the archive under the
    name 'blob'.  Raises Exception if the filter cannot be executed or
    exits non-zero.
    '''
    try:
        proc = subprocess.Popen(['python', os.path.realpath(filter),
                '--get-manifest'], stdout = subprocess.PIPE)
    except OSError:
        raise Exception("Couldn't execute filter program")
    manifest = proc.communicate()[0]
    if proc.returncode != 0:
        raise Exception("Couldn't generate filter manifest")
    files = {'filter': filter}
    if blob is not None:
        files['blob'] = blob
    make_zipfile(out, manifest, files)
|
Allow make_zipfile() to clobber the destination file
|
Allow make_zipfile() to clobber the destination file
|
Python
|
epl-1.0
|
cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond
|
---
+++
@@ -17,8 +17,6 @@
def make_zipfile(path, manifest, files):
'''manifest is a string, files is a dict of filename => path pairs'''
- if os.path.exists(path):
- raise Exception("Refusing to clobber destination file")
zip = zipfile.ZipFile(path, mode = 'w', compression = zipfile.ZIP_DEFLATED)
zip.writestr('opendiamond-manifest.txt', manifest)
for name, path in files.items():
|
5bb6cc3ffb92736515df94b62d7d1981eadd7c44
|
tilequeue/postgresql.py
|
tilequeue/postgresql.py
|
from itertools import cycle
from psycopg2.extras import register_hstore, register_json
import psycopg2
import threading
import ujson
class DBAffinityConnectionsNoLimit(object):

    # Similar to the db affinity pool, but without keeping track of
    # the connections. It's the caller's responsibility to call us
    # back with the connection objects so that we can close them.

    def __init__(self, dbnames, conn_info):
        # Databases are handed out round-robin via dbname_index.
        self.dbnames = dbnames
        self.conn_info = conn_info
        self.conn_mapping = {}
        # Guards dbname_index so concurrent get_conns calls rotate safely.
        self.lock = threading.Lock()
        self.dbname_index = 0

    def _make_conn(self, conn_info):
        # Read-only autocommit sessions; hstore and json columns decode to
        # Python values (json via the faster ujson).
        conn = psycopg2.connect(**conn_info)
        conn.set_session(readonly=True, autocommit=True)
        register_hstore(conn)
        register_json(conn, loads=ujson.loads)
        return conn

    def get_conns(self, n_conn):
        # Open n_conn fresh connections, all to the same database; the
        # chosen database advances round-robin on each call.
        with self.lock:
            dbname = self.dbnames[self.dbname_index]
            self.dbname_index += 1
            if self.dbname_index >= len(self.dbnames):
                self.dbname_index = 0
            conn_info_with_db = dict(self.conn_info, dbname=dbname)
            conns = [self._make_conn(conn_info_with_db)
                     for i in range(n_conn)]
        return conns

    def put_conns(self, conns):
        # Best-effort close of returned connections; close errors are
        # deliberately ignored.
        for conn in conns:
            try:
                conn.close()
            except:
                pass

    def closeall(self):
        raise Exception('DBAffinityConnectionsNoLimit pool does not track '
                        'connections')
|
from itertools import cycle
from psycopg2.extras import register_hstore, register_json
import psycopg2
import threading
import ujson
class DBAffinityConnectionsNoLimit(object):

    # Similar to the db affinity pool, but without keeping track of
    # the connections. It's the caller's responsibility to call us
    # back with the connection objects so that we can close them.

    def __init__(self, dbnames, conn_info):
        # cycle() yields the database names round-robin, forever.
        self.dbnames = cycle(dbnames)
        self.conn_info = conn_info
        self.conn_mapping = {}
        # Guards the shared cycle iterator across threads.
        self.lock = threading.Lock()

    def _make_conn(self, conn_info):
        # Read-only autocommit sessions; hstore and json columns decode to
        # Python values (json via the faster ujson).
        conn = psycopg2.connect(**conn_info)
        conn.set_session(readonly=True, autocommit=True)
        register_hstore(conn)
        register_json(conn, loads=ujson.loads)
        return conn

    def get_conns(self, n_conn):
        # Open n_conn fresh connections, all to the same database; the
        # chosen database advances round-robin on each call.
        with self.lock:
            dbname = self.dbnames.next()
            conn_info_with_db = dict(self.conn_info, dbname=dbname)
            conns = [self._make_conn(conn_info_with_db)
                     for i in range(n_conn)]
        return conns

    def put_conns(self, conns):
        # Best-effort close of returned connections; close errors are
        # deliberately ignored.
        for conn in conns:
            try:
                conn.close()
            except:
                pass

    def closeall(self):
        raise Exception('DBAffinityConnectionsNoLimit pool does not track '
                        'connections')
|
Use cycle instead of counting an index ourselves
|
Use cycle instead of counting an index ourselves
|
Python
|
mit
|
tilezen/tilequeue,mapzen/tilequeue
|
---
+++
@@ -12,11 +12,10 @@
# back with the connection objects so that we can close them.
def __init__(self, dbnames, conn_info):
- self.dbnames = dbnames
+ self.dbnames = cycle(dbnames)
self.conn_info = conn_info
self.conn_mapping = {}
self.lock = threading.Lock()
- self.dbname_index = 0
def _make_conn(self, conn_info):
conn = psycopg2.connect(**conn_info)
@@ -27,10 +26,7 @@
def get_conns(self, n_conn):
with self.lock:
- dbname = self.dbnames[self.dbname_index]
- self.dbname_index += 1
- if self.dbname_index >= len(self.dbnames):
- self.dbname_index = 0
+ dbname = self.dbnames.next()
conn_info_with_db = dict(self.conn_info, dbname=dbname)
conns = [self._make_conn(conn_info_with_db)
for i in range(n_conn)]
|
b3ed7ade10d18e8ecd9bfc64d056bdbb2a1501f6
|
tests/periph_rtt/tests/01-run.py
|
tests/periph_rtt/tests/01-run.py
|
#!/usr/bin/env python3
# Copyright (C) 2019 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
import time
from testrunner import run
PRECISION = 0.05  # 5%
MAX_HELLOS = 5


def testfunc(child):
    # The application announces its period first; capture it so the test
    # adapts to however the board is configured.
    child.expect(r'This test will display \'Hello\' every (\d+) seconds')
    period = int(child.match[1])
    child.expect_exact('Initializing the RTT driver')
    child.expect(r'RTT now: \d+')
    child.expect(r'Setting initial alarm to now \+ {} s \(\d+\)'
                 .format(period))
    child.expect_exact('Done setting up the RTT, wait for many Hellos')
    # Time MAX_HELLOS periods of output.
    start = time.time()
    for _ in range(MAX_HELLOS):
        child.expect_exact('Hello\r\n', timeout=period + 1)

    # Verify timings: the RTT must track wall-clock time within PRECISION.
    elapsed = time.time() - start
    assert elapsed > (MAX_HELLOS * period * (1 - PRECISION))
    assert elapsed < (MAX_HELLOS * period * (1 + PRECISION))


if __name__ == "__main__":
    sys.exit(run(testfunc))
|
#!/usr/bin/env python3
# Copyright (C) 2019 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
import time
from testrunner import run
PRECISION = 0.05  # 5%
MAX_HELLOS = 5


def testfunc(child):
    # The application announces its period first; capture it so the test
    # adapts to however the board is configured.
    child.expect(r'This test will display \'Hello\' every (\d+) seconds')
    period = int(child.match.group(1))
    child.expect_exact('Initializing the RTT driver')
    child.expect(r'RTT now: \d+')
    child.expect(r'Setting initial alarm to now \+ {} s \(\d+\)'
                 .format(period))
    child.expect_exact('Done setting up the RTT, wait for many Hellos')
    # Time MAX_HELLOS periods of output.
    start = time.time()
    for _ in range(MAX_HELLOS):
        child.expect_exact('Hello\r\n', timeout=period + 1)

    # Verify timings: the RTT must track wall-clock time within PRECISION.
    elapsed = time.time() - start
    assert elapsed > (MAX_HELLOS * period * (1 - PRECISION))
    assert elapsed < (MAX_HELLOS * period * (1 + PRECISION))


if __name__ == "__main__":
    sys.exit(run(testfunc))
|
Correct the test script syntax
|
tests/periph_rtt: Correct the test script syntax
Previously the test was failing due to apparently incorrect Python syntax in
the testrunner script. This fix corrects this and the test now passes.
|
Python
|
lgpl-2.1
|
OlegHahm/RIOT,RIOT-OS/RIOT,OlegHahm/RIOT,jasonatran/RIOT,mfrey/RIOT,x3ro/RIOT,aeneby/RIOT,rfuentess/RIOT,josephnoir/RIOT,toonst/RIOT,ant9000/RIOT,kYc0o/RIOT,jasonatran/RIOT,cladmi/RIOT,josephnoir/RIOT,cladmi/RIOT,mtausig/RIOT,toonst/RIOT,OTAkeys/RIOT,toonst/RIOT,yogo1212/RIOT,smlng/RIOT,miri64/RIOT,mfrey/RIOT,rfuentess/RIOT,cladmi/RIOT,basilfx/RIOT,OlegHahm/RIOT,yogo1212/RIOT,kYc0o/RIOT,OTAkeys/RIOT,basilfx/RIOT,jasonatran/RIOT,miri64/RIOT,kaspar030/RIOT,mfrey/RIOT,mtausig/RIOT,mtausig/RIOT,smlng/RIOT,OlegHahm/RIOT,kYc0o/RIOT,aeneby/RIOT,mfrey/RIOT,x3ro/RIOT,kaspar030/RIOT,yogo1212/RIOT,ant9000/RIOT,authmillenon/RIOT,rfuentess/RIOT,authmillenon/RIOT,authmillenon/RIOT,yogo1212/RIOT,smlng/RIOT,aeneby/RIOT,cladmi/RIOT,RIOT-OS/RIOT,mfrey/RIOT,kYc0o/RIOT,miri64/RIOT,aeneby/RIOT,kYc0o/RIOT,toonst/RIOT,jasonatran/RIOT,miri64/RIOT,ant9000/RIOT,basilfx/RIOT,mtausig/RIOT,rfuentess/RIOT,josephnoir/RIOT,josephnoir/RIOT,kaspar030/RIOT,authmillenon/RIOT,ant9000/RIOT,authmillenon/RIOT,basilfx/RIOT,x3ro/RIOT,x3ro/RIOT,OlegHahm/RIOT,miri64/RIOT,josephnoir/RIOT,basilfx/RIOT,authmillenon/RIOT,OTAkeys/RIOT,RIOT-OS/RIOT,aeneby/RIOT,ant9000/RIOT,RIOT-OS/RIOT,kaspar030/RIOT,OTAkeys/RIOT,jasonatran/RIOT,smlng/RIOT,RIOT-OS/RIOT,cladmi/RIOT,x3ro/RIOT,rfuentess/RIOT,OTAkeys/RIOT,smlng/RIOT,toonst/RIOT,kaspar030/RIOT,mtausig/RIOT,yogo1212/RIOT,yogo1212/RIOT
|
---
+++
@@ -17,7 +17,7 @@
def testfunc(child):
child.expect(r'This test will display \'Hello\' every (\d+) seconds')
- period = int(child.match[1])
+ period = int(child.match.group(1))
child.expect_exact('Initializing the RTT driver')
child.expect(r'RTT now: \d+')
child.expect(r'Setting initial alarm to now \+ {} s \(\d+\)'
|
648189583d78efef9ec8f65e861e1321c397c1a6
|
app/views/main_view.py
|
app/views/main_view.py
|
from flask import render_template
from flask_classy import FlaskView
from ..models import PostModel
class Main(FlaskView):
    """ Main page view. """
    route_base = "/"

    def index(self):
        # Render the landing page with the stored posts.
        posts = PostModel.fetch()
        return render_template("index.html", posts=posts)
|
from flask import render_template
from flask_classy import FlaskView
from ..models import PostModel
class Main(FlaskView):
    """ Main page view. """
    route_base = "/"

    def index(self):
        # Reset the query, then order posts by most recently updated and
        # break ties by title before fetching.
        PostModel.set_query()
        PostModel.query.order = ['-updated', 'title']
        posts = PostModel.fetch()
        return render_template("index.html", posts=posts)
|
Set index main view to return post ordered by updated and title field
|
Set index main view to return post ordered by updated and title field
|
Python
|
mit
|
oldani/nanodegree-blog,oldani/nanodegree-blog,oldani/nanodegree-blog
|
---
+++
@@ -9,5 +9,7 @@
route_base = "/"
def index(self):
+ PostModel.set_query()
+ PostModel.query.order = ['-updated', 'title']
posts = PostModel.fetch()
return render_template("index.html", posts=posts)
|
e0e53c594569c24eee4e4b58f567f6d9099806df
|
tests/test_replay.py
|
tests/test_replay.py
|
import pytest
# Successful retrieval
# Accurate retrieval
# Comprehensive retrieval of sub-resources
# Placeholder tests for the replay module; each is skipped until the
# corresponding behaviour is implemented.
@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromIPFSHash():
    pass


@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromCDXJFile():
    pass


@pytest.mark.skip(reason='not implemented')
def test_accuracy_retrievedContent_vsWARC():
    pass


@pytest.mark.skip(reason='not implemented')
def test_availability_subResources():
    pass


@pytest.mark.skip(reason='not implemented')
def test_inclusionInWebpage_selectResources():
    pass


@pytest.mark.skip(reason='not implemented')
def test_exclusionInWebpage_selectIrrelevantResources():
    pass

# TODO: Have unit tests for each function in replay.py
|
import pytest
# Successful retrieval
# Accurate retrieval
# Comprehensive retrieval of sub-resources
# Placeholder tests for the replay module; each is skipped until the
# corresponding behaviour is implemented.  Retrieval is to be covered for
# local CDXJ files and for remote ones reachable over each transport.
@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromIPFSHash():
    pass


@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromLocalCDXJFile():
    pass


@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromRemoteCDXJFile_ByIPFSHash():
    pass


@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromRemoteCDXJFile_ByHTTP():
    pass


@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromRemoteCDXJFile_ByHTTPS():
    pass


@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromRemoteCDXJFile_ByFTP():
    pass


@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromRemoteCDXJFile_ByBitTorrentMagnetLink():
    pass


@pytest.mark.skip(reason='not implemented')
def test_retrieveWARCRecord_fromRemoteCDXJFile_BySMB():
    pass


@pytest.mark.skip(reason='not implemented')
def test_accuracy_retrievedContent_vsWARC():
    pass


@pytest.mark.skip(reason='not implemented')
def test_availability_subResources():
    pass


@pytest.mark.skip(reason='not implemented')
def test_inclusionInWebpage_selectResources():
    pass


@pytest.mark.skip(reason='not implemented')
def test_exclusionInWebpage_selectIrrelevantResources():
    pass

# TODO: Have unit tests for each function in replay.py
|
Add more tests to be written
|
Add more tests to be written
|
Python
|
mit
|
oduwsdl/ipwb,oduwsdl/ipwb,oduwsdl/ipwb,oduwsdl/ipwb
|
---
+++
@@ -11,7 +11,37 @@
@pytest.mark.skip(reason='not implemented')
-def test_retrieveWARCRecord_fromCDXJFile():
+def test_retrieveWARCRecord_fromLocalCDXJFile():
+ pass
+
+
+@pytest.mark.skip(reason='not implemented')
+def test_retrieveWARCRecord_fromRemoteCDXJFile_ByIPFSHash():
+ pass
+
+
+@pytest.mark.skip(reason='not implemented')
+def test_retrieveWARCRecord_fromRemoteCDXJFile_ByHTTP():
+ pass
+
+
+@pytest.mark.skip(reason='not implemented')
+def test_retrieveWARCRecord_fromRemoteCDXJFile_ByHTTPS():
+ pass
+
+
+@pytest.mark.skip(reason='not implemented')
+def test_retrieveWARCRecord_fromRemoteCDXJFile_ByFTP():
+ pass
+
+
+@pytest.mark.skip(reason='not implemented')
+def test_retrieveWARCRecord_fromRemoteCDXJFile_ByBitTorrentMagnetLink():
+ pass
+
+
+@pytest.mark.skip(reason='not implemented')
+def test_retrieveWARCRecord_fromRemoteCDXJFile_BySMB():
pass
|
b103c02815a7819e9cb4f1cc0061202cfcfd0fa6
|
bidb/api/views.py
|
bidb/api/views.py
|
from django.conf import settings
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from .utils import parse_submission, InvalidSubmission
@csrf_exempt
@require_http_methods(['PUT'])
def submit(request):
    """Accept a buildinfo submission via HTTP PUT.

    Returns 400 with the parse error text for invalid submissions;
    otherwise replies with the absolute URL of the stored buildinfo,
    status 201 when newly created and 200 when it already existed.
    """
    try:
        submission, created = parse_submission(request)
    except InvalidSubmission as exc:
        # Echo the parse failure back to the submitting client.
        return HttpResponseBadRequest("{}\n".format(exc))
    return HttpResponse('{}{}\n'.format(
        settings.SITE_URL,
        submission.buildinfo.get_absolute_url(),
    ), status=201 if created else 200)
|
from django.conf import settings
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from .utils import parse_submission, InvalidSubmission
@csrf_exempt
@require_http_methods(['PUT'])
def submit(request):
    """Accept a buildinfo submission via HTTP PUT.

    Returns 400 with an explicit rejection message for invalid
    submissions; otherwise replies with the absolute URL of the stored
    buildinfo, status 201 when newly created and 200 when it existed.
    """
    try:
        submission, created = parse_submission(request)
    except InvalidSubmission as exc:
        # Prefix makes it obvious to clients that nothing was stored.
        return HttpResponseBadRequest("Rejecting submission: {}\n".format(exc))
    return HttpResponse('{}{}\n'.format(
        settings.SITE_URL,
        submission.buildinfo.get_absolute_url(),
    ), status=201 if created else 200)
|
Make it clearer that we are rejecting the submission.
|
Make it clearer that we are rejecting the submission.
|
Python
|
agpl-3.0
|
lamby/buildinfo.debian.net,lamby/buildinfo.debian.net
|
---
+++
@@ -12,7 +12,7 @@
try:
submission, created = parse_submission(request)
except InvalidSubmission as exc:
- return HttpResponseBadRequest("{}\n".format(exc))
+ return HttpResponseBadRequest("Rejecting submission: {}\n".format(exc))
return HttpResponse('{}{}\n'.format(
settings.SITE_URL,
|
e5e61e4d2575a39d585b3c51c082b2b53bade7bb
|
django_sphinx_db/backend/sphinx/base.py
|
django_sphinx_db/backend/sphinx/base.py
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
    """Test-database hooks for Sphinx: reuse the live index rather than
    creating/destroying a schema."""

    def __init__(self):
        pass

    def create_test_db(self, verbosity=1, autoclobber=False):
        # NOOP, test using regular sphinx database.
        if self.connection.settings_dict['TEST_NAME']:
            # Re-point the connection at TEST_NAME instead of NAME.
            test_name = self.connection.settings_dict['TEST_NAME']
            self.connection.close()
            self.connection.settings_dict['NAME'] = test_name
            # Re-opens the connection against the test database; the
            # cursor object itself is unused.
            # NOTE(review): raises KeyError when 'TEST_NAME' is absent
            # from settings_dict -- confirm callers always set it.
            cursor = self.connection.cursor()
            return test_name
        return self.connection.settings_dict['NAME']

    def destroy_test_db(self, old_database_name, verbosity=1):
        # NOOP, we created nothing, nothing to destroy.
        return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
|
from django.db.backends.mysql.base import DatabaseWrapper as MySQLDatabaseWrapper
from django.db.backends.mysql.base import DatabaseOperations as MySQLDatabaseOperations
from django.db.backends.mysql.creation import DatabaseCreation as MySQLDatabaseCreation
class SphinxOperations(MySQLDatabaseOperations):
compiler_module = "django_sphinx_db.backend.sphinx.compiler"
def fulltext_search_sql(self, field_name):
return 'MATCH (%s)'
class SphinxCreation(MySQLDatabaseCreation):
def create_test_db(self, verbosity=1, autoclobber=False):
# NOOP, test using regular sphinx database.
if self.connection.settings_dict['TEST_NAME']:
test_name = self.connection.settings_dict['TEST_NAME']
self.connection.close()
self.connection.settings_dict['NAME'] = test_name
cursor = self.connection.cursor()
return test_name
return self.connection.settings_dict['NAME']
def destroy_test_db(self, old_database_name, verbosity=1):
# NOOP, we created nothing, nothing to destroy.
return
class DatabaseWrapper(MySQLDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
# The following can be useful for unit testing, with multiple databases
# configured in Django, if one of them does not support transactions,
# Django will fall back to using clear/create (instead of begin...rollback)
# between each test. The method Django uses to detect transactions uses
# CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
# transactions ARE. Therefore, we can just set this to True, and Django will
# use transactions for clearing data between tests when all OTHER backends
# support it.
self.features.supports_transactions = True
|
Speed up unit tests when Sphinx DB is configured
|
Speed up unit tests when Sphinx DB is configured
|
Python
|
bsd-3-clause
|
smartfile/django-sphinx-db,rutube/django-sphinx-db,anatoliy-larin/django-sphinx-db,jnormore/django-sphinx-db,petekalo/django-sphinx-db
|
---
+++
@@ -31,3 +31,12 @@
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.ops = SphinxOperations(self)
self.creation = SphinxCreation(self)
+ # The following can be useful for unit testing, with multiple databases
+ # configured in Django, if one of them does not support transactions,
+ # Django will fall back to using clear/create (instead of begin...rollback)
+ # between each test. The method Django uses to detect transactions uses
+ # CREATE TABLE and DROP TABLE, which ARE NOT supported by Sphinx, even though
+ # transactions ARE. Therefore, we can just set this to True, and Django will
+ # use transactions for clearing data between tests when all OTHER backends
+ # support it.
+ self.features.supports_transactions = True
|
88f0c284b01bf5b4545fe63bdd1fde7cc66ad937
|
us_ignite/apps/admin.py
|
us_ignite/apps/admin.py
|
from django.contrib import admin
from us_ignite.apps.models import (Application, ApplicationURL,
ApplicationImage, Domain, Feature)
class ApplicationURLInline(admin.TabularInline):
model = ApplicationURL
class ApplicationImageInline(admin.TabularInline):
model = ApplicationImage
class ApplicationAdmin(admin.ModelAdmin):
list_display = ('name', 'slug', 'stage', 'status')
search_fields = ['name', 'slug', 'short_description', 'description']
list_filter = ['stage', 'status', 'created']
date_hierarchy = 'created'
inlines = [ApplicationURLInline, ApplicationImageInline]
class DomainAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
class FeatureAdmin(admin.ModelAdmin):
    """Admin list configuration for Feature entries.

    Fixed: the base class was written as ``admin. ModelAdmin`` (stray
    space after the dot) -- valid Python, but inconsistent with every
    other ``admin.ModelAdmin`` reference in this file.
    """
    list_display = ('name', 'slug')
admin.site.register(Application, ApplicationAdmin)
admin.site.register(Domain, DomainAdmin)
admin.site.register(Feature, FeatureAdmin)
|
from django.contrib import admin
from us_ignite.apps.models import (Application, ApplicationURL,
ApplicationImage, Domain, Feature,
Page, PageApplication)
class ApplicationURLInline(admin.TabularInline):
model = ApplicationURL
class ApplicationImageInline(admin.TabularInline):
model = ApplicationImage
class ApplicationAdmin(admin.ModelAdmin):
list_display = ('name', 'slug', 'stage', 'status')
search_fields = ['name', 'slug', 'short_description', 'description']
list_filter = ['stage', 'status', 'created']
date_hierarchy = 'created'
inlines = [ApplicationURLInline, ApplicationImageInline]
class DomainAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
class FeatureAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
class PageApplicationInline(admin.TabularInline):
    """Inline editor attaching Applications to a Page."""

    # Raw ID widget avoids rendering every Application in a select box.
    raw_id_fields = ('application', )
    model = PageApplication
class PageAdmin(admin.ModelAdmin):
    """Admin for Page; Applications are attached/ordered via the inline."""

    list_display = ('name', 'slug', 'status', 'created', )
    list_filter = ('status', 'created', )
    date_hierarchy = 'created'
    inlines = [PageApplicationInline]
admin.site.register(Application, ApplicationAdmin)
admin.site.register(Domain, DomainAdmin)
admin.site.register(Feature, FeatureAdmin)
admin.site.register(Page, PageAdmin)
|
Add admin to add Applications to the Pages.
|
Add admin to add Applications to the Pages.
https://github.com/madewithbytes/us_ignite/issues/79
The applications can be added to a page and ordered
in the admin.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
---
+++
@@ -1,7 +1,8 @@
from django.contrib import admin
from us_ignite.apps.models import (Application, ApplicationURL,
- ApplicationImage, Domain, Feature)
+ ApplicationImage, Domain, Feature,
+ Page, PageApplication)
class ApplicationURLInline(admin.TabularInline):
@@ -24,9 +25,22 @@
list_display = ('name', 'slug')
-class FeatureAdmin(admin. ModelAdmin):
+class FeatureAdmin(admin.ModelAdmin):
list_display = ('name', 'slug')
+
+
+class PageApplicationInline(admin.TabularInline):
+ raw_id_fields = ('application', )
+ model = PageApplication
+
+
+class PageAdmin(admin.ModelAdmin):
+ list_display = ('name', 'slug', 'status', 'created', )
+ list_filter = ('status', 'created', )
+ date_hierarchy = 'created'
+ inlines = [PageApplicationInline]
admin.site.register(Application, ApplicationAdmin)
admin.site.register(Domain, DomainAdmin)
admin.site.register(Feature, FeatureAdmin)
+admin.site.register(Page, PageAdmin)
|
a9bb32b91e2b742705b6292bd52fc869a8130766
|
dymport/import_file.py
|
dymport/import_file.py
|
"""
Various functions to dynamically import (arbitrary names from) arbitrary files.
To import a file like it is a module, use `import_file`.
"""
from importlib.util import module_from_spec, spec_from_file_location
def import_file(name, file):
    """Load the Python source at *file* and return it as a module named *name*.

    Raises:
        ImportError: if no import spec can be built for *file*.
    """
    module_spec = spec_from_file_location(name, file)
    if module_spec is None:
        raise ImportError("Could not import: '{}'".format(file))
    loaded = module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded)
    return loaded
|
"""
Various functions to dynamically import (arbitrary names from) arbitrary files.
To import a file like it is a module, use `import_file`.
"""
from sys import version_info
def import_file(name, file):
    """Import the source file *file* as a module named *name*.

    Raises:
        ImportError: on an unsupported Python version, or if no import
            spec can be built for *file*.
    """
    if not version_info > (3, 5):
        # The importlib.util machinery used below is unavailable here.
        raise ImportError("Dymport: unsupported Python version, could not "
                          "import '{}'".format(file))

    from importlib.util import module_from_spec, spec_from_file_location

    module_spec = spec_from_file_location(name, file)
    if not module_spec:
        raise ImportError("Could not import: '{}'".format(file))

    loaded = module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded)
    return loaded
|
Add check for supported Python versions
|
Add check for supported Python versions
Not all Python versions are supported by this package, because the import
mechanism changes in the different Python versions.
If an unsupported Python version is used, an ImportError is raised.
|
Python
|
mit
|
ErwinJanssen/dymport.py
|
---
+++
@@ -4,7 +4,7 @@
To import a file like it is a module, use `import_file`.
"""
-from importlib.util import module_from_spec, spec_from_file_location
+from sys import version_info
def import_file(name, file):
@@ -13,11 +13,18 @@
Raises an ImportError if it could not be imported.
"""
- spec = spec_from_file_location(name, file)
+ if version_info > (3, 5):
+ from importlib.util import module_from_spec, spec_from_file_location
- if not spec:
- raise ImportError("Could not import: '{}'".format(file))
+ spec = spec_from_file_location(name, file)
- module = module_from_spec(spec)
- spec.loader.exec_module(module)
- return module
+ if not spec:
+ raise ImportError("Could not import: '{}'".format(file))
+
+ module = module_from_spec(spec)
+ spec.loader.exec_module(module)
+ return module
+
+ else:
+ raise ImportError("Dymport: unsupported Python version, could not "
+ "import '{}'".format(file))
|
bf5532f405df8869b4869c2d839e6093ebf963bc
|
wisp/utils.py
|
wisp/utils.py
|
import importlib
import importlib.machinery
import sys
from module import Module
import json
def message_to_function(raw_message):
    """Convert a JSON-formatted message into an executable wisp Module.

    Args:
        raw_message (str): JSON document with a "function_object"
            (holding "function_path" and a "validate" force-update
            flag), "params", and a "uFid" module identifier.

    Returns:
        None if raw_message is None or not valid JSON, the cached module
        from sys.modules when reuse is allowed, or a Module wrapping the
        freshly loaded code (None when the source file is missing).
    """
    if raw_message is None:
        return None
    try:
        wisp = json.loads(raw_message)
    except json.JSONDecodeError:
        return None
    function_object = wisp["function_object"]
    path = function_object["function_path"]
    force_update = function_object["validate"]
    params = wisp["params"]
    # uFid may arrive as a number; SourceFileLoader requires a str name.
    name = str(wisp["uFid"])
    loader = importlib.machinery.SourceFileLoader(name, path)
    # Guard with membership first: indexing sys.modules[name] directly
    # raises KeyError when the module was never loaded.
    if name in sys.modules:
        if force_update:
            del sys.modules[name]
        else:
            return sys.modules[name]
    mod = None
    try:
        mod = loader.load_module()
    except FileNotFoundError:
        print("Module not found")
    finally:
        # NOTE(review): returning from finally suppresses any in-flight
        # exception other than FileNotFoundError -- confirm intended.
        if mod is not None:
            wisp_module = Module(mod, params)
            return wisp_module
        return mod
|
import importlib
import importlib.machinery
import sys
from module import Module
import json
def message_to_function(raw_message):
    """Convert a JSON-formatted message into an executable wisp Module.

    Args:
        raw_message (str): JSON document with a "function_object"
            (holding "function_path" and a "validate" force-update
            flag), "params", and a "uFid" module identifier.

    Returns:
        None if raw_message is None or not valid JSON, the cached module
        from sys.modules when reuse is allowed, or a Module wrapping the
        freshly loaded code (None when the source file is missing).
    """
    if raw_message is None:
        return None
    try:
        wisp = json.loads(raw_message)
    except json.JSONDecodeError:
        return None
    function_object = wisp["function_object"]
    path = function_object["function_path"]
    force_update = function_object["validate"]
    params = wisp["params"]
    # uFid may arrive as a number; SourceFileLoader requires a str name.
    name = str(wisp["uFid"])
    loader = importlib.machinery.SourceFileLoader(name, path)
    # Module already cached: drop it when forcing an update, otherwise
    # return the cached instance without reloading from disk.
    if name in sys.modules.keys():
        if force_update:
            del sys.modules[name]
        else:
            return sys.modules[name]
    mod = None
    try:
        mod = loader.load_module()
    except FileNotFoundError:
        print("Module not found")
    finally:
        # NOTE(review): returning from finally suppresses any in-flight
        # exception other than FileNotFoundError -- confirm intended.
        if mod is not None:
            wisp_module = Module(mod, params)
            return wisp_module
        return mod
|
Fix errors caused by key error in sys.modules and wrong type error by uFid.
|
Fix errors caused by key error in sys.modules and wrong type error by uFid.
|
Python
|
apache-2.0
|
hoonkim/rune,hoonkim/rune,hoonkim/rune
|
---
+++
@@ -33,17 +33,16 @@
force_update = function_object["validate"]
params = wisp["params"]
- name = wisp["uFid"]
+ name = str(wisp["uFid"])
loader = importlib.machinery.SourceFileLoader(name, path)
- # if force update is enabled, and module exists.
- if force_update and sys.modules[name]:
- del sys.modules[name]
- # if force update is not enabled and module exists.
- elif sys.modules[name]:
- return sys.modules[name]
- # Whether force update or module does not exists.
+ # if modules exists on the memory.
+ if name in sys.modules.keys():
+ if force_update:
+ del sys.modules[name]
+ else:
+ return sys.modules[name]
mod = None
try:
|
4a5e798fe23d720315a7cab60824b70ce0983f8e
|
Kane1985/Chapter2/Ex4.1.py
|
Kane1985/Chapter2/Ex4.1.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 4.1 from Kane 1985"""
from sympy.physics.mechanics import dot, dynamicsymbols, MechanicsStrPrinter
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy import solve, symbols, pi
from sympy.simplify.simplify import trigsimp
def msprint(expr):
pr = MechanicsStrPrinter()
return pr.doprint(expr)
theta1, theta2, theta3 = symbols('theta1 theta2 theta3')
x1, x2, x3 = symbols('x1 x2 x3')
A = ReferenceFrame('A')
A_1 = A.orientnew('A_1', 'Axis', [theta1, A.x])
A_2 = A_1.orientnew('A_2', 'Axis', [theta2, A.y])
B = A_2.orientnew('B', 'Axis', [theta3, A.z])
O = Point('O')
P = O.locatenew('P', x1 * A.x + x2 * A.y + x3 * A.z)
p = P.pos_from(O)
# Point P is on L (span(B.x)) when:
print("{0} = 0".format(trigsimp(dot(p, B.x))))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 4.1 from Kane 1985"""
from sympy.physics.mechanics import dot, dynamicsymbols, MechanicsStrPrinter
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy import solve, symbols, pi, sin, cos
from sympy.simplify.simplify import trigsimp
def msprint(expr):
pr = MechanicsStrPrinter()
return pr.doprint(expr)
theta = symbols('theta:3')
x = symbols('x:3')
q = symbols('q')
A = ReferenceFrame('A')
B = A.orientnew('B', 'SPACE', theta, 'xyz')
O = Point('O')
P = O.locatenew('P', x[0] * A.x + x[1] * A.y + x[2] * A.z)
p = P.pos_from(O)
# From problem, point P is on L (span(B.x)) when:
constraint_eqs = {x[0] : q*cos(theta[1])*cos(theta[2]),
x[1] : q*cos(theta[1])*sin(theta[2]),
x[2] : -q*sin(theta[1])}
# If point P is on line L then r^{P/O} will have no components in the B.y or
# B.z directions since point O is also on line L and B.x is parallel to L.
assert(trigsimp(dot(P.pos_from(O), B.y).subs(constraint_eqs)) == 0)
assert(trigsimp(dot(P.pos_from(O), B.z).subs(constraint_eqs)) == 0)
|
Simplify formulation and change from print() to assert()
|
Simplify formulation and change from print() to assert()
|
Python
|
bsd-3-clause
|
jcrist/pydy,Shekharrajak/pydy,oliverlee/pydy,jcrist/pydy,jcrist/pydy,oliverlee/pydy,jcrist/pydy,Shekharrajak/pydy,oliverlee/pydy,jcrist/pydy,Shekharrajak/pydy,jcrist/pydy,skidzo/pydy,Shekharrajak/pydy,skidzo/pydy,skidzo/pydy,jcrist/pydy,skidzo/pydy
|
---
+++
@@ -4,26 +4,31 @@
from sympy.physics.mechanics import dot, dynamicsymbols, MechanicsStrPrinter
from sympy.physics.mechanics import ReferenceFrame, Point
-from sympy import solve, symbols, pi
+from sympy import solve, symbols, pi, sin, cos
from sympy.simplify.simplify import trigsimp
def msprint(expr):
pr = MechanicsStrPrinter()
return pr.doprint(expr)
-theta1, theta2, theta3 = symbols('theta1 theta2 theta3')
-x1, x2, x3 = symbols('x1 x2 x3')
+theta = symbols('theta:3')
+x = symbols('x:3')
+q = symbols('q')
A = ReferenceFrame('A')
-A_1 = A.orientnew('A_1', 'Axis', [theta1, A.x])
-A_2 = A_1.orientnew('A_2', 'Axis', [theta2, A.y])
-B = A_2.orientnew('B', 'Axis', [theta3, A.z])
+B = A.orientnew('B', 'SPACE', theta, 'xyz')
O = Point('O')
-P = O.locatenew('P', x1 * A.x + x2 * A.y + x3 * A.z)
+P = O.locatenew('P', x[0] * A.x + x[1] * A.y + x[2] * A.z)
p = P.pos_from(O)
-# Point P is on L (span(B.x)) when:
-print("{0} = 0".format(trigsimp(dot(p, B.x))))
+# From problem, point P is on L (span(B.x)) when:
+constraint_eqs = {x[0] : q*cos(theta[1])*cos(theta[2]),
+ x[1] : q*cos(theta[1])*sin(theta[2]),
+ x[2] : -q*sin(theta[1])}
+# If point P is on line L then r^{P/O} will have no components in the B.y or
+# B.z directions since point O is also on line L and B.x is parallel to L.
+assert(trigsimp(dot(P.pos_from(O), B.y).subs(constraint_eqs)) == 0)
+assert(trigsimp(dot(P.pos_from(O), B.z).subs(constraint_eqs)) == 0)
|
82954c638aa013a037125e0c9f167045f38da504
|
genes/docker/main.py
|
genes/docker/main.py
|
from genes.apt import commands as apt
from genes.brew import commands as brew
from genes import debian
from genes.debian.traits import is_debian
from genes.ubuntu.traits import is_ubuntu
from genes.mac.traits import is_osx
def main():
    """Install Docker for the detected platform.

    Debian/Ubuntu: add the dockerproject.org apt repository and install
    docker-engine. OS X: install the Docker Toolbox cask. Any other
    platform is currently a silent no-op (see FIXME).
    """
    if is_debian() or is_ubuntu():
        # Repo channel is "<distro>-<codename>", e.g. "debian-jessie".
        repo = debian.traits.distribution.lower() + '-' + \
            debian.traits.codename.lower()
        # Docker's apt repository signing key.
        apt.recv_keys('58118E89F3A912897C070ADBF76221572C52609D')
        apt.add_repo('deb', 'https://apt.dockerproject.org/repo', repo, 'main')
        apt.update()
        apt.install('docker-engine')
        # FIXME: add compose, machine, etc
    elif is_osx():
        brew.cask_install('dockertoolbox')
    else:
        # FIXME: print failure, handle osx/windows
        pass
|
from genes.apt import commands as apt
from genes.brew import commands as brew
from genes.debian.traits import is_debian, get_distro, get_codename
from genes.ubuntu.traits import is_ubuntu
from genes.mac.traits import is_osx
def main():
    """Install Docker for the detected platform.

    Debian/Ubuntu: add the dockerproject.org apt repository and install
    docker-engine. OS X: install the Docker Toolbox cask. Any other
    platform is currently a silent no-op (see FIXME).
    """
    if is_debian() or is_ubuntu():
        # Repo channel is "<distro>-<codename>", e.g. "debian-jessie".
        repo = get_distro().lower() + '-' + \
            get_codename().lower()
        # Docker's apt repository signing key.
        apt.recv_keys('58118E89F3A912897C070ADBF76221572C52609D')
        apt.add_repo('deb', 'https://apt.dockerproject.org/repo', repo, 'main')
        apt.update()
        apt.install('docker-engine')
        # FIXME: add compose, machine, etc
    elif is_osx():
        brew.cask_install('dockertoolbox')
    else:
        # FIXME: print failure, handle osx/windows
        pass
|
Migrate docker to new debian semantics
|
Migrate docker to new debian semantics
|
Python
|
mit
|
hatchery/genepool,hatchery/Genepool2
|
---
+++
@@ -1,15 +1,14 @@
from genes.apt import commands as apt
from genes.brew import commands as brew
-from genes import debian
-from genes.debian.traits import is_debian
+from genes.debian.traits import is_debian, get_distro, get_codename
from genes.ubuntu.traits import is_ubuntu
from genes.mac.traits import is_osx
def main():
if is_debian() or is_ubuntu():
- repo = debian.traits.distribution.lower() + '-' + \
- debian.traits.codename.lower()
+ repo = get_distro().lower() + '-' + \
+ get_codename().lower()
apt.recv_keys('58118E89F3A912897C070ADBF76221572C52609D')
apt.add_repo('deb', 'https://apt.dockerproject.org/repo', repo, 'main')
apt.update()
|
4130c082ae3008365c854ad65c4510cb04dfbf27
|
webcrawler.py
|
webcrawler.py
|
import requests
from bs4 import BeautifulSoup
def parse_html(html, **kwargs):
    """Extract headline, first body paragraph, and infobox image URL
    from a Wikipedia-style article page.

    Args:
        html: Raw HTML (bytes or str) to parse.
        **kwargs: Accepted for interface compatibility; unused here.

    Returns:
        dict with 'headline', 'paragraph' and 'image_url' keys; each
        value is '' when that part of the page is missing.
    """
    parsed_html = BeautifulSoup(html, 'lxml')
    headline = parsed_html.body.find('h1')
    paragraph = None
    # Parse Paragraph: first <p> in #bodyContent that is not inside a
    # table (skips infobox/metadata paragraphs).
    content_container = parsed_html.body.find(
        'div',
        attrs={'id': 'bodyContent'}
    )
    for p in content_container.findAll('p'):
        if not p.findParents('table'):
            paragraph = p
            break
    # Parse Image: lead image inside the infobox table, if present.
    infobox = parsed_html.body.find('table', attrs={'class': 'infobox'})
    image = infobox.find('img') if infobox else None
    return {
        'headline': headline.text.strip() if headline else '',
        'paragraph': paragraph.text.strip() if paragraph else '',
        'image_url': image.attrs.get('src') if image else '',
    }
def crawl(url, **kwargs):
    """Fetch *url* and return the parsed page data.

    Raises:
        requests.HTTPError: on a non-2xx response.
    """
    response = requests.get(url)
    response.raise_for_status()
    data = parse_html(response.content, **kwargs)
    # TODOs: Persist data
    return data
|
import re
import requests
from bs4 import BeautifulSoup
def parse_html(html, **kwargs):
    """Extract headline, first paragraph, and an image URL from a page.

    Wikipedia pages (kwargs['is_wikipedia_page'] truthy) use the fixed
    #bodyContent / infobox layout; generic pages require the caller to
    supply 'content_container_class' and 'image_container_class'.

    Returns:
        dict with 'headline', 'paragraph' and 'image_url' keys (each ''
        when missing), or None for a generic page without both required
        container-class kwargs.
    """
    is_wikipedia_page = kwargs.get('is_wikipedia_page')
    parsed_html = BeautifulSoup(html, 'html.parser')
    headline = parsed_html.body.find('h1')
    paragraph = None
    if is_wikipedia_page:
        # Parse Paragraph: first <p> in #bodyContent not inside a table.
        content_container = parsed_html.body.find(
            'div',
            attrs={'id': 'bodyContent'}
        )
        for p in content_container.findAll('p'):
            if not p.findParents('table'):
                paragraph = p
                break
        # Parse Image: lead image inside the infobox table, if present.
        infobox = parsed_html.body.find('table', attrs={'class': 'infobox'})
        image = infobox.find('img') if infobox else None
    else:
        content_container_class = kwargs.get('content_container_class')
        image_container_class = kwargs.get('image_container_class')
        # Generic pages cannot be parsed without both CSS class hints.
        if not all([
            content_container_class,
            image_container_class
        ]):
            return
        content_container = parsed_html.body.find('div', attrs={'class': content_container_class})
        paragraph = content_container.find('p')
        image_container = parsed_html.body.find('div', attrs={'class': image_container_class})
        image = image_container.find('img')
    return {
        'headline': headline.text.strip() if headline else '',
        'paragraph': paragraph.text.strip() if paragraph else '',
        'image_url': image.attrs.get('src') if image else '',
    }
def crawl(url, **kwargs):
    """Fetch *url*, flag Wikipedia pages, and return the parsed data.

    Raises:
        requests.HTTPError: on a non-2xx response.
    """
    response = requests.get(url)
    response.raise_for_status()
    # Any *.wikipedia.org URL is parsed with the Wikipedia layout rules.
    is_wikipedia_page = re.compile(r'.*(wikipedia.org)').match(url) is not None
    if is_wikipedia_page:
        kwargs.update({
            'is_wikipedia_page': is_wikipedia_page
        })
    data = parse_html(response.content, **kwargs)
    # TODOs: Persist data
    return data
|
Add support for generic pages
|
Add support for generic pages
|
Python
|
mit
|
alamasfu10/webcrawler
|
---
+++
@@ -1,27 +1,45 @@
+import re
import requests
from bs4 import BeautifulSoup
def parse_html(html, **kwargs):
- parsed_html = BeautifulSoup(html, 'lxml')
+ is_wikipedia_page = kwargs.get('is_wikipedia_page')
+ parsed_html = BeautifulSoup(html, 'html.parser')
headline = parsed_html.body.find('h1')
paragraph = None
- # Parse Paragraph
- content_container = parsed_html.body.find(
- 'div',
- attrs={'id': 'bodyContent'}
- )
- for p in content_container.findAll('p'):
- if not p.findParents('table'):
- paragraph = p
- break
+ if is_wikipedia_page:
+ # Parse Paragraph
+ content_container = parsed_html.body.find(
+ 'div',
+ attrs={'id': 'bodyContent'}
+ )
+ for p in content_container.findAll('p'):
+ if not p.findParents('table'):
+ paragraph = p
+ break
- # Parse Image
- infobox = parsed_html.body.find('table', attrs={'class': 'infobox'})
- image = infobox.find('img') if infobox else None
+ # Parse Image
+ infobox = parsed_html.body.find('table', attrs={'class': 'infobox'})
+ image = infobox.find('img') if infobox else None
+ else:
+ content_container_class = kwargs.get('content_container_class')
+ image_container_class = kwargs.get('image_container_class')
+
+ if not all([
+ content_container_class,
+ image_container_class
+ ]):
+ return
+
+ content_container = parsed_html.body.find('div', attrs={'class': content_container_class})
+ paragraph = content_container.find('p')
+
+ image_container = parsed_html.body.find('div', attrs={'class': image_container_class})
+ image = image_container.find('img')
return {
'headline': headline.text.strip() if headline else '',
@@ -33,6 +51,13 @@
def crawl(url, **kwargs):
response = requests.get(url)
response.raise_for_status()
+ is_wikipedia_page = re.compile(r'.*(wikipedia.org)').match(url) is not None
+
+ if is_wikipedia_page:
+ kwargs.update({
+ 'is_wikipedia_page': is_wikipedia_page
+ })
+
data = parse_html(response.content, **kwargs)
# TODOs: Persist data
|
d4e3609cf6f749d6ac95bc8332844f63b61b41b1
|
oslo/__init__.py
|
oslo/__init__.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__import__('pkg_resources').declare_namespace(__name__)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__import__('pkg_resources').declare_namespace(__name__)
|
Remove extraneous vim editor configuration comments
|
Remove extraneous vim editor configuration comments
Change-Id: I173c838765efd527b3bc652d9b8c32ac89c756b6
Partial-Bug: #1229324
|
Python
|
apache-2.0
|
varunarya10/oslo.serialization,openstack/oslo.serialization
|
---
+++
@@ -1,5 +1,3 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
|
d961c644f74d83150e3f5a3ea9599af0d2b839ae
|
hash_table.py
|
hash_table.py
|
#!/usr/bin/env python
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
class HashItem(object):
def __init__(self):
pass
class Hash(object):
    """Skeleton of a simple hash table; nothing is implemented yet."""

    def __init__(self):
        pass

    def hash(self):
        # TODO: map a key to a bucket index.
        pass

    def get(self):
        # TODO: look up a stored value.
        pass

    def set(self):
        # TODO: insert a value into the table.
        pass
|
#!/usr/bin/env python
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
class HashItem(object):
def __init__(self):
pass
class Hash(object):
    """Simple hash table backed by a list of buckets (separate chaining).

    Only the bucket storage is initialized; hash/get/set remain
    unimplemented placeholders.
    """

    def __init__(self, size=1024):
        """Create *size* independent bucket lists.

        :param size: number of buckets in the table (default 1024)
        """
        # List comprehension replaces the manual append loop; each
        # iteration still creates a fresh list, so buckets never alias.
        self.table = [[] for _ in range(size)]

    def hash(self):
        # TODO: map a key to a bucket index.
        pass

    def get(self):
        # TODO: look up a stored value.
        pass

    def set(self):
        # TODO: insert a value into the table.
        pass
|
Build out init function of hash table class
|
Build out init function of hash table class
|
Python
|
mit
|
jwarren116/data-structures-deux
|
---
+++
@@ -2,7 +2,7 @@
'''Implementation of a simple hash table.
The table has `hash`, `get` and `set` methods.
-The hash function uses a very basic hash algorithm to insert the value
+The hash function uses a very basic hash algorithm to insert the value
into the table.
'''
@@ -13,8 +13,10 @@
class Hash(object):
- def __init__(self):
- pass
+ def __init__(self, size=1024):
+ self.table = []
+ for i in range(size):
+ self.table.append(list())
def hash(self):
pass
|
0b41bdf6897bb070fc3d90aa5d90228e744dee60
|
sunpy/util/map_manager.py
|
sunpy/util/map_manager.py
|
import weakref
import sunpy
class MapManager(weakref.WeakSet):
    """Weak referenced set of maps created using functions decorated by manage_maps."""

    def __repr__(self):
        # NOTE(review): relies on WeakSet's internal ``data`` attribute,
        # a CPython implementation detail -- confirm before depending on it.
        return str(self.data)
def manage_maps(fn):
    """Maps returned by functions decorated with manage_maps (eg. sunpy.make_map)
    will be registered in the sunpy.map_manager list."""
    def fn_manage_maps(*args, **kwargs):
        # Call through, then register the result with the global manager;
        # the weak reference means registration never extends a map's life.
        ret = fn(*args, **kwargs)
        sunpy.map_manager.add(ret)
        return ret
    return fn_manage_maps
|
import weakref
import sunpy
class MapManager(weakref.WeakSet):
"""Weak referenced set of maps created using functions decorated by manage_maps."""
pass
def manage_maps(fn):
"""Maps returned by functions decorated with manage_maps (eg. sunpy.make_map)
will be registered in the sunpy.map_manager list."""
def fn_manage_maps(*args, **kwargs):
ret = fn(*args, **kwargs)
sunpy.map_manager.add(ret)
return ret
return fn_manage_maps
|
Remove manager repr (user should not need to view contents)
|
Remove manager repr (user should not need to view contents)
|
Python
|
bsd-2-clause
|
mjm159/sunpy,dpshelio/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,Alex-Ian-Hamilton/sunpy,mjm159/sunpy
|
---
+++
@@ -3,9 +3,7 @@
class MapManager(weakref.WeakSet):
"""Weak referenced set of maps created using functions decorated by manage_maps."""
-
- def __repr__(self):
- return str(self.data)
+ pass
def manage_maps(fn):
"""Maps returned by functions decorated with manage_maps (eg. sunpy.make_map)
|
c4dd6502bc7b9d5970a659c57e6aa2d25cc00fe5
|
catwatch/lib/util_datetime.py
|
catwatch/lib/util_datetime.py
|
import datetime
def timedelta_months(months, compare_date=None):
    """
    Return compare_date shifted forward by *months* average months.

    A month is approximated as 365/12 days, so results drift from exact
    calendar months. (Previous docstring wrongly described a JSON/Flask
    response -- this function only does date arithmetic.)

    :param months: Amount of months to offset
    :type months: int
    :param compare_date: Date to compare at (defaults to today)
    :type compare_date: date
    :return: date
    """
    if compare_date is None:
        compare_date = datetime.date.today()
    delta = months * 365 / 12
    compare_date_with_delta = compare_date + datetime.timedelta(delta)
    return compare_date_with_delta
|
import datetime
def timedelta_months(months, compare_date=None):
"""
Return a new datetime with a month offset applied.
:param months: Amount of months to offset
:type months: int
:param compare_date: Date to compare at
:type compare_date: date
:return: datetime
"""
if compare_date is None:
compare_date = datetime.date.today()
delta = months * 365 / 12
compare_date_with_delta = compare_date + datetime.timedelta(delta)
return compare_date_with_delta
|
Update timedelta_months docstring to be accurate
|
Update timedelta_months docstring to be accurate
|
Python
|
mit
|
z123/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,z123/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,z123/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask
|
---
+++
@@ -3,13 +3,13 @@
def timedelta_months(months, compare_date=None):
"""
- Return a JSON response.
+ Return a new datetime with a month offset applied.
:param months: Amount of months to offset
:type months: int
:param compare_date: Date to compare at
:type compare_date: date
- :return: Flask response
+ :return: datetime
"""
if compare_date is None:
compare_date = datetime.date.today()
|
4a528978e9a783b9fb4f25d31a32a2ca524d7ce1
|
infosystem/subsystem/domain/resource.py
|
infosystem/subsystem/domain/resource.py
|
from infosystem.database import db
from infosystem.common.subsystem import entity
class Domain(entity.Entity, db.Model):
attributes = ['name', 'parent_id']
attributes += entity.Entity.attributes
name = db.Column(db.String(60), nullable=False, unique=True)
parent_id = db.Column(
db.CHAR(32), db.ForeignKey("domain.id"), nullable=True)
def __init__(self, id, name, active=True, parent_id=None,
created_at=None, created_by=None,
updated_at=None, updated_by=None):
super().__init__(id, active, created_at, created_by,
updated_at, updated_by)
self.name = name
self.parent_id = parent_id
|
from infosystem.database import db
from infosystem.common.subsystem import entity
class Domain(entity.Entity, db.Model):
attributes = ['name', 'parent_id']
attributes += entity.Entity.attributes
name = db.Column(db.String(60), nullable=False, unique=True)
parent_id = db.Column(
db.CHAR(32), db.ForeignKey("domain.id"), nullable=True)
def __init__(self, id, name, parent_id=None,
active=True, created_at=None, created_by=None,
updated_at=None, updated_by=None):
super().__init__(id, active, created_at, created_by,
updated_at, updated_by)
self.name = name
self.parent_id = parent_id
|
Change attributes order on constructor
|
Change attributes order on constructor
|
Python
|
apache-2.0
|
samueldmq/infosystem
|
---
+++
@@ -11,8 +11,8 @@
parent_id = db.Column(
db.CHAR(32), db.ForeignKey("domain.id"), nullable=True)
- def __init__(self, id, name, active=True, parent_id=None,
- created_at=None, created_by=None,
+ def __init__(self, id, name, parent_id=None,
+ active=True, created_at=None, created_by=None,
updated_at=None, updated_by=None):
super().__init__(id, active, created_at, created_by,
updated_at, updated_by)
|
cffa73ae51f63ec5e107d9df98b1eb5b44fa64b8
|
backend/catalogue/serializers.py
|
backend/catalogue/serializers.py
|
from rest_framework import serializers
from .models import Release, Track, Comment
class CommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = ('id', 'comment')
class TrackSerializer(serializers.ModelSerializer):
cdid = serializers.StringRelatedField(
read_only=True
)
class Meta:
model = Track
fields = ('trackid', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'cdid')
class ReleaseSerializer(serializers.ModelSerializer):
tracks = serializers.HyperlinkedIdentityField(view_name='release-tracks')
comments = serializers.HyperlinkedIdentityField(view_name='release-comments')
class Meta:
model = Release
fields = ('id', 'arrivaldate', 'artist', 'title', 'year', 'local', 'compilation', 'female', 'tracks', 'comments')
|
from rest_framework import serializers
from .models import Release, Track, Comment
class CommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = ('id', 'comment')
class TrackSerializer(serializers.ModelSerializer):
cdid = serializers.StringRelatedField(
read_only=True
)
class Meta:
model = Track
fields = ('trackid', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'cdid')
class ReleaseSerializer(serializers.ModelSerializer):
tracks = serializers.HyperlinkedIdentityField(view_name='release-tracks')
comments = serializers.HyperlinkedIdentityField(view_name='release-comments')
class Meta:
model = Release
fields = ('id', 'arrivaldate', 'artist', 'title', 'year', 'local', 'cpa', 'compilation', 'female', 'tracks', 'comments')
|
Add country field to release serializer.
|
Add country field to release serializer.
|
Python
|
mit
|
ThreeDRadio/playlists,ThreeDRadio/playlists,ThreeDRadio/playlists
|
---
+++
@@ -22,4 +22,4 @@
class Meta:
model = Release
- fields = ('id', 'arrivaldate', 'artist', 'title', 'year', 'local', 'compilation', 'female', 'tracks', 'comments')
+ fields = ('id', 'arrivaldate', 'artist', 'title', 'year', 'local', 'cpa', 'compilation', 'female', 'tracks', 'comments')
|
3ee00fad1965dae23f83da870d7df1cb37727c7a
|
structlog/migrations/0001_initial.py
|
structlog/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-10 14:33
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-10 14:33
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.postgres.operations import HStoreExtension
class Migration(migrations.Migration):
dependencies = [
]
operations = [
HStoreExtension(),
]
|
Add HStore extension to initial migration.
|
Add HStore extension to initial migration.
|
Python
|
bsd-2-clause
|
carlohamalainen/django-struct-log
|
---
+++
@@ -3,7 +3,7 @@
from __future__ import unicode_literals
from django.db import migrations
-
+from django.contrib.postgres.operations import HStoreExtension
class Migration(migrations.Migration):
@@ -11,4 +11,5 @@
]
operations = [
+ HStoreExtension(),
]
|
8ecf9d95cf7f085b0245b07422ccda007937a5c6
|
visu3d/array_dataclass.py
|
visu3d/array_dataclass.py
|
# Copyright 2022 The visu3d Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dataclass array wrapper."""
from __future__ import annotations
import dataclass_array as dca
from visu3d.plotly import fig_utils
class DataclassArray(dca.DataclassArray, fig_utils.Visualizable):
pass
|
# Copyright 2022 The visu3d Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dataclass array wrapper."""
from __future__ import annotations
import dataclass_array as dca
from visu3d.plotly import fig_utils
@dca.dataclass_array(broadcast=True, cast_dtype=True)
class DataclassArray(dca.DataclassArray, fig_utils.Visualizable):
pass
|
Add `@dca.dataclass_array` decorator to customize dca params. Change default values
|
Add `@dca.dataclass_array` decorator to customize dca params. Change default values
PiperOrigin-RevId: 475563717
|
Python
|
apache-2.0
|
google-research/visu3d
|
---
+++
@@ -20,5 +20,6 @@
from visu3d.plotly import fig_utils
+@dca.dataclass_array(broadcast=True, cast_dtype=True)
class DataclassArray(dca.DataclassArray, fig_utils.Visualizable):
pass
|
fb10e4b8ae37f1442bdb643c27ea0b984da6a374
|
cherrypy/test/test_httputil.py
|
cherrypy/test/test_httputil.py
|
"""Tests for cherrypy/lib/httputil.py."""
import unittest
from cherrypy.lib import httputil
class UtilityTests(unittest.TestCase):
def test_urljoin(self):
# Test all slash+atom combinations for SCRIPT_NAME and PATH_INFO
self.assertEqual(httputil.urljoin('/sn/', '/pi/'), '/sn/pi/')
self.assertEqual(httputil.urljoin('/sn/', '/pi'), '/sn/pi')
self.assertEqual(httputil.urljoin('/sn/', '/'), '/sn/')
self.assertEqual(httputil.urljoin('/sn/', ''), '/sn/')
self.assertEqual(httputil.urljoin('/sn', '/pi/'), '/sn/pi/')
self.assertEqual(httputil.urljoin('/sn', '/pi'), '/sn/pi')
self.assertEqual(httputil.urljoin('/sn', '/'), '/sn/')
self.assertEqual(httputil.urljoin('/sn', ''), '/sn')
self.assertEqual(httputil.urljoin('/', '/pi/'), '/pi/')
self.assertEqual(httputil.urljoin('/', '/pi'), '/pi')
self.assertEqual(httputil.urljoin('/', '/'), '/')
self.assertEqual(httputil.urljoin('/', ''), '/')
self.assertEqual(httputil.urljoin('', '/pi/'), '/pi/')
self.assertEqual(httputil.urljoin('', '/pi'), '/pi')
self.assertEqual(httputil.urljoin('', '/'), '/')
self.assertEqual(httputil.urljoin('', ''), '/')
if __name__ == '__main__':
unittest.main()
|
"""Tests for ``cherrypy.lib.httputil``."""
import pytest
from cherrypy.lib import httputil
class TestUtility(object):
@pytest.mark.parametrize(
'script_name,path_info,expected_url',
[
('/sn/', '/pi/', '/sn/pi/'),
('/sn/', '/pi', '/sn/pi'),
('/sn/', '/', '/sn/'),
('/sn/', '', '/sn/'),
('/sn', '/pi/', '/sn/pi/'),
('/sn', '/pi', '/sn/pi'),
('/sn', '/', '/sn/'),
('/sn', '', '/sn'),
('/', '/pi/', '/pi/'),
('/', '/pi', '/pi'),
('/', '/', '/'),
('/', '', '/'),
('', '/pi/', '/pi/'),
('', '/pi', '/pi'),
('', '/', '/'),
('', '', '/'),
]
)
def test_urljoin(self, script_name, path_info, expected_url):
"""Test all slash+atom combinations for SCRIPT_NAME and PATH_INFO."""
actual_url = httputil.urljoin(script_name, path_info)
assert actual_url == expected_url
|
Rewrite httputil test module via pytest
|
Rewrite httputil test module via pytest
|
Python
|
bsd-3-clause
|
cherrypy/cherrypy,Safihre/cherrypy,Safihre/cherrypy,cherrypy/cherrypy
|
---
+++
@@ -1,30 +1,32 @@
-"""Tests for cherrypy/lib/httputil.py."""
+"""Tests for ``cherrypy.lib.httputil``."""
+import pytest
-import unittest
from cherrypy.lib import httputil
-class UtilityTests(unittest.TestCase):
-
- def test_urljoin(self):
- # Test all slash+atom combinations for SCRIPT_NAME and PATH_INFO
- self.assertEqual(httputil.urljoin('/sn/', '/pi/'), '/sn/pi/')
- self.assertEqual(httputil.urljoin('/sn/', '/pi'), '/sn/pi')
- self.assertEqual(httputil.urljoin('/sn/', '/'), '/sn/')
- self.assertEqual(httputil.urljoin('/sn/', ''), '/sn/')
- self.assertEqual(httputil.urljoin('/sn', '/pi/'), '/sn/pi/')
- self.assertEqual(httputil.urljoin('/sn', '/pi'), '/sn/pi')
- self.assertEqual(httputil.urljoin('/sn', '/'), '/sn/')
- self.assertEqual(httputil.urljoin('/sn', ''), '/sn')
- self.assertEqual(httputil.urljoin('/', '/pi/'), '/pi/')
- self.assertEqual(httputil.urljoin('/', '/pi'), '/pi')
- self.assertEqual(httputil.urljoin('/', '/'), '/')
- self.assertEqual(httputil.urljoin('/', ''), '/')
- self.assertEqual(httputil.urljoin('', '/pi/'), '/pi/')
- self.assertEqual(httputil.urljoin('', '/pi'), '/pi')
- self.assertEqual(httputil.urljoin('', '/'), '/')
- self.assertEqual(httputil.urljoin('', ''), '/')
-
-
-if __name__ == '__main__':
- unittest.main()
+class TestUtility(object):
+ @pytest.mark.parametrize(
+ 'script_name,path_info,expected_url',
+ [
+ ('/sn/', '/pi/', '/sn/pi/'),
+ ('/sn/', '/pi', '/sn/pi'),
+ ('/sn/', '/', '/sn/'),
+ ('/sn/', '', '/sn/'),
+ ('/sn', '/pi/', '/sn/pi/'),
+ ('/sn', '/pi', '/sn/pi'),
+ ('/sn', '/', '/sn/'),
+ ('/sn', '', '/sn'),
+ ('/', '/pi/', '/pi/'),
+ ('/', '/pi', '/pi'),
+ ('/', '/', '/'),
+ ('/', '', '/'),
+ ('', '/pi/', '/pi/'),
+ ('', '/pi', '/pi'),
+ ('', '/', '/'),
+ ('', '', '/'),
+ ]
+ )
+ def test_urljoin(self, script_name, path_info, expected_url):
+ """Test all slash+atom combinations for SCRIPT_NAME and PATH_INFO."""
+ actual_url = httputil.urljoin(script_name, path_info)
+ assert actual_url == expected_url
|
638d7f38a0e22f72680437372b873d69ead973b1
|
config/run_distutils/__init__.py
|
config/run_distutils/__init__.py
|
from SCons.Script import *
def generate(env):
env.SetDefault(RUN_DISTUTILS = 'python')
env.SetDefault(RUN_DISTUTILSOPTS = 'build')
env['RUN_DISTUTILS'] = 'python'
env['RUN_DISTUTILSOPTS'] = 'build'
bld = Builder(action = '$RUN_DISTUTILS $SOURCE $RUN_DISTUTILSOPTS')
env.Append(BUILDERS = {'RunDistUtils' : bld})
def exists():
return 1
|
from SCons.Script import *
import os
def generate(env):
env.SetDefault(RUN_DISTUTILS = 'python')
env.SetDefault(RUN_DISTUTILSOPTS = 'build')
if 'RUN_DISTUTILS' in os.environ:
env['RUN_DISTUTILS'] = os.environ['RUN_DISTUTILS']
if 'RUN_DISTUTILSOPTS' in os.environ:
env['RUN_DISTUTILSOPTS'] = os.environ['RUN_DISTUTILSOPTS']
bld = Builder(action = '$RUN_DISTUTILS $SOURCE $RUN_DISTUTILSOPTS')
env.Append(BUILDERS = {'RunDistUtils' : bld})
def exists():
return 1
|
Allow env vars for RUN_DISTUTILS
|
Allow env vars for RUN_DISTUTILS
Allow use of env vars RUN_DISTUTILS, RUN_DISTUTILOPTS as defaults.
With this, on macos, macports doesn't need to be in PATH to build FAHControl. One just needs
export RUN_DISTUTILS="/opt/local/bin/python"
or the equivalent in dockbot.json env.
|
Python
|
lgpl-2.1
|
CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang
|
---
+++
@@ -1,12 +1,15 @@
from SCons.Script import *
+import os
def generate(env):
env.SetDefault(RUN_DISTUTILS = 'python')
env.SetDefault(RUN_DISTUTILSOPTS = 'build')
- env['RUN_DISTUTILS'] = 'python'
- env['RUN_DISTUTILSOPTS'] = 'build'
+ if 'RUN_DISTUTILS' in os.environ:
+ env['RUN_DISTUTILS'] = os.environ['RUN_DISTUTILS']
+ if 'RUN_DISTUTILSOPTS' in os.environ:
+ env['RUN_DISTUTILSOPTS'] = os.environ['RUN_DISTUTILSOPTS']
bld = Builder(action = '$RUN_DISTUTILS $SOURCE $RUN_DISTUTILSOPTS')
env.Append(BUILDERS = {'RunDistUtils' : bld})
|
4da79cbec5880da6fb16b5a474786247a820d09c
|
nowplaying.py
|
nowplaying.py
|
#!/usr/bin/env python
from termcolor import colored
from appscript import *
from track import Track
def main():
print(get_song())
def get_song():
itunes_open = bool(app('System Events').processes[its.name == 'iTunes'].count())
if itunes_open: # check if application open
itunes = app('iTunes')
if itunes.player_state.get() == k.playing: # check if song playing
track = Track(itunes.current_track.get())
return track
else:
return colored('No song currently playing.', 'red')
else:
return colored('iTunes not open.', 'red')
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from termcolor import colored
from appscript import *
from track import Track
def main():
print(get_song())
def get_song():
itunes_open = bool(app('System Events').processes[its.name == 'iTunes'].count())
if itunes_open: # check if application open
itunes = app('iTunes')
if itunes.player_state.get() == k.playing: # check if song playing
track = Track(itunes.current_track.get())
return track
return colored('No song currently playing.', 'red')
return colored('iTunes not open.', 'red')
if __name__ == '__main__':
main()
|
Return error instead of if/else
|
Return error instead of if/else
|
Python
|
mit
|
kshvmdn/nowplaying
|
---
+++
@@ -16,10 +16,8 @@
if itunes.player_state.get() == k.playing: # check if song playing
track = Track(itunes.current_track.get())
return track
- else:
- return colored('No song currently playing.', 'red')
- else:
- return colored('iTunes not open.', 'red')
+ return colored('No song currently playing.', 'red')
+ return colored('iTunes not open.', 'red')
if __name__ == '__main__':
main()
|
b570dc035110ca8b27af30fde78d2bb6e364417f
|
test/basic.py
|
test/basic.py
|
import unittest
from anser import Anser, Client
class BasicAnserTest(unittest.TestCase):
def test_creation(self):
server = Anser(__file__)
self.assertEquals(server.name, __file__)
def test_creation_explicit_no_debug(self):
server = Anser(__file__, debug=False)
self.assertFalse(server.debug)
def test_creation_implicit_no_debug(self):
server = Anser(__file__)
self.assertFalse(server.debug)
def test_creation_explicit_debug(self):
server = Anser(__file__, debug=True)
self.assertTrue(server.debug)
def test_add_action(self):
server = Anser(__file__)
@server.action('default')
def dummy_action(message, address):
pass
self.assertTrue(dummy_action in server.actions)
class BasicClientTest(unittest.TestCase):
def test_creation(self):
client = Client('10.0.0.1', 4000)
self.assertEquals(client.address, '10.0.0.1')
self.assertEquals(client.port, 4000)
def test_creation_implicit_no_debug(self):
client = Client('10.0.0.1', 4000)
self.assertFalse(client.debug)
def test_creation_explicit_debug(self):
client = Client('10.0.0.1', 4000, debug=True)
self.assertTrue(client.debug)
if __name__ == '__main__':
unittest.main()
|
import unittest
from anser import Anser, Client
class BasicAnserTest(unittest.TestCase):
def test_creation(self):
server = Anser(__file__)
self.assertEqual(server.name, __file__)
def test_creation_explicit_no_debug(self):
server = Anser(__file__, debug=False)
self.assertFalse(server.debug)
def test_creation_implicit_no_debug(self):
server = Anser(__file__)
self.assertFalse(server.debug)
def test_creation_explicit_debug(self):
server = Anser(__file__, debug=True)
self.assertTrue(server.debug)
def test_add_action(self):
server = Anser(__file__)
@server.action('default')
def dummy_action(message, address):
pass
self.assertTrue(dummy_action in server.actions)
class BasicClientTest(unittest.TestCase):
def test_creation(self):
client = Client('10.0.0.1', 4000)
self.assertEqual(client.address, '10.0.0.1')
self.assertEqual(client.port, 4000)
def test_creation_implicit_no_debug(self):
client = Client('10.0.0.1', 4000)
self.assertFalse(client.debug)
def test_creation_explicit_debug(self):
client = Client('10.0.0.1', 4000, debug=True)
self.assertTrue(client.debug)
if __name__ == '__main__':
unittest.main()
|
Use assertEqual instead of assertEquals
|
Use assertEqual instead of assertEquals
|
Python
|
mit
|
iconpin/anser
|
---
+++
@@ -6,7 +6,7 @@
def test_creation(self):
server = Anser(__file__)
- self.assertEquals(server.name, __file__)
+ self.assertEqual(server.name, __file__)
def test_creation_explicit_no_debug(self):
server = Anser(__file__, debug=False)
@@ -32,8 +32,8 @@
def test_creation(self):
client = Client('10.0.0.1', 4000)
- self.assertEquals(client.address, '10.0.0.1')
- self.assertEquals(client.port, 4000)
+ self.assertEqual(client.address, '10.0.0.1')
+ self.assertEqual(client.port, 4000)
def test_creation_implicit_no_debug(self):
client = Client('10.0.0.1', 4000)
|
54cfb9864256b27b9f4cd411f170cc12d47727e5
|
appengine/components/components/machine_provider/dimensions.py
|
appengine/components/components/machine_provider/dimensions.py
|
# Copyright 2015 The LUCI Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
"""Dimensions for the Machine Provider."""
from protorpc import messages
class Backend(messages.Enum):
"""Lists valid backends."""
DUMMY = 0
GCE = 1
class OSFamily(messages.Enum):
"""Lists valid OS families."""
LINUX = 1
OSX = 2
WINDOWS = 3
class Dimensions(messages.Message):
"""Represents the dimensions of a machine."""
# The operating system family of this machine.
os_family = messages.EnumField(OSFamily, 1)
# The backend which should be used to spin up this machine. This should
# generally be left unspecified so the Machine Provider selects the backend
# on its own.
backend = messages.EnumField(Backend, 2)
# The hostname of this machine.
hostname = messages.StringField(3)
# The number of CPUs available to this machine.
num_cpus = messages.IntegerField(4)
# The amount of memory available to this machine.
memory_gb = messages.FloatField(5)
# The disk space available to this machine.
disk_gb = messages.IntegerField(6)
|
# Copyright 2015 The LUCI Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
"""Dimensions for the Machine Provider."""
from protorpc import messages
class Backend(messages.Enum):
"""Lists valid backends."""
DUMMY = 0
GCE = 1
VSPHERE = 2
class OSFamily(messages.Enum):
"""Lists valid OS families."""
LINUX = 1
OSX = 2
WINDOWS = 3
class Dimensions(messages.Message):
"""Represents the dimensions of a machine."""
# The operating system family of this machine.
os_family = messages.EnumField(OSFamily, 1)
# The backend which should be used to spin up this machine. This should
# generally be left unspecified so the Machine Provider selects the backend
# on its own.
backend = messages.EnumField(Backend, 2)
# The hostname of this machine.
hostname = messages.StringField(3)
# The number of CPUs available to this machine.
num_cpus = messages.IntegerField(4)
# The amount of memory available to this machine.
memory_gb = messages.FloatField(5)
# The disk space available to this machine.
disk_gb = messages.IntegerField(6)
|
Add enum field for vSphere backend
|
Add enum field for vSphere backend
Review-Url: https://codereview.chromium.org/1997903002
|
Python
|
apache-2.0
|
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
|
---
+++
@@ -11,6 +11,7 @@
"""Lists valid backends."""
DUMMY = 0
GCE = 1
+ VSPHERE = 2
class OSFamily(messages.Enum):
|
020b2518efce2d973093a366e0a9abfadbd602fd
|
main/forms.py
|
main/forms.py
|
from django import forms
class IndexForm(forms.Form):
usos_auth_pin = forms.IntegerField(label='USOS Authorization PIN')
id_list = forms.CharField(
widget=forms.Textarea, label='ID List',
help_text='List of students IDs to query, one per line.')
student_id_regex = forms.CharField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
|
from django import forms
class IndexForm(forms.Form):
usos_auth_pin = forms.IntegerField(
label='USOS Authorization PIN',
help_text='If not filled out, then only the cache is used. Note that '
'this means that some IDs may fail to be looked up.',
required=False)
id_list = forms.CharField(
widget=forms.Textarea, label='ID List',
help_text='List of students IDs to query, one per line.')
student_id_regex = forms.CharField(
label='Student ID regex',
help_text='Regular expression used to match the student ID in each '
'line. If cannot match (or a student is not found in the '
'database), then the line is left as is.',
initial=r'\b\d{7,}\b',
widget=forms.TextInput(attrs={'placeholder': r'\b\d{7,}\b'}))
|
Add help_text and required=False to the PIN field
|
Add help_text and required=False to the PIN field
|
Python
|
mit
|
m4tx/usos-id-mapper,m4tx/usos-id-mapper
|
---
+++
@@ -2,7 +2,11 @@
class IndexForm(forms.Form):
- usos_auth_pin = forms.IntegerField(label='USOS Authorization PIN')
+ usos_auth_pin = forms.IntegerField(
+ label='USOS Authorization PIN',
+ help_text='If not filled out, then only the cache is used. Note that '
+ 'this means that some IDs may fail to be looked up.',
+ required=False)
id_list = forms.CharField(
widget=forms.Textarea, label='ID List',
help_text='List of students IDs to query, one per line.')
|
3e1408affa823af2ed95decf52b002614d060a26
|
pombola/core/tests/test_templatetags.py
|
pombola/core/tests/test_templatetags.py
|
from django.test import TestCase
from ..templatetags.breadcrumbs import breadcrumbs
class BreadcrumbTest(TestCase):
def test_breadcrumbs(self):
"""Check that the breadcrumbs are generated as expected"""
home_li = '<li><a href="/" title="Breadcrumb link to the homepage.">Home</a> <span class="sep">»</span> </li>'
tests = (
# input, expected output
( '/', '<li>Home</li>'),
( '/foo', home_li + '<li>Foo</li>'),
( '/foo/bar', home_li + '<li><a href="foo/" title="Breadcrumb link to Foo">Foo</a> <span class="sep">»</span> </li><li>Bar</li>'),
# Test that coordinates are passed through correctly
# (don't drop '-', put space after ',')
# See issue #762
( '/-1.23,4.56', home_li + '<li>-1.23, 4.56</li>'),
)
for url, expected in tests:
actual = breadcrumbs(url)
self.assertEqual(actual, expected)
|
from django.test import TestCase
from ..templatetags.breadcrumbs import breadcrumbs
from ..templatetags.active_class import active_class
class BreadcrumbTest(TestCase):
def test_breadcrumbs(self):
"""Check that the breadcrumbs are generated as expected"""
home_li = '<li><a href="/" title="Breadcrumb link to the homepage.">Home</a> <span class="sep">»</span> </li>'
tests = (
# input, expected output
( '/', '<li>Home</li>'),
( '/foo', home_li + '<li>Foo</li>'),
( '/foo/bar', home_li + '<li><a href="foo/" title="Breadcrumb link to Foo">Foo</a> <span class="sep">»</span> </li><li>Bar</li>'),
# Test that coordinates are passed through correctly
# (don't drop '-', put space after ',')
# See issue #762
( '/-1.23,4.56', home_li + '<li>-1.23, 4.56</li>'),
)
for url, expected in tests:
actual = breadcrumbs(url)
self.assertEqual(actual, expected)
class ActiveClassTest(TestCase):
def test_active(self):
"""Check that active is returned when the url matches the input"""
tests = (
('/', 'home', {}),
('/place/foo/', 'place', {'slug': 'foo'}),
)
for current_url, route_name, kwargs in tests:
actual = active_class(current_url, route_name, **kwargs)
self.assertEqual(' active ', actual)
self.assertEqual(active_class('/foo', 'home'), '')
|
Add tests for active_class templatetag
|
Add tests for active_class templatetag
|
Python
|
agpl-3.0
|
hzj123/56th,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,hzj123/56th,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,hzj123/56th,patricmutwiri/pombola,patricmutwiri/pombola,mysociety/pombola,geoffkilpin/pombola,patricmutwiri/pombola,ken-muturi/pombola,ken-muturi/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,patricmutwiri/pombola,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,mysociety/pombola
|
---
+++
@@ -2,6 +2,8 @@
from django.test import TestCase
from ..templatetags.breadcrumbs import breadcrumbs
+from ..templatetags.active_class import active_class
+
class BreadcrumbTest(TestCase):
@@ -27,3 +29,19 @@
actual = breadcrumbs(url)
self.assertEqual(actual, expected)
+
+class ActiveClassTest(TestCase):
+
+ def test_active(self):
+ """Check that active is returned when the url matches the input"""
+
+ tests = (
+ ('/', 'home', {}),
+ ('/place/foo/', 'place', {'slug': 'foo'}),
+ )
+
+ for current_url, route_name, kwargs in tests:
+ actual = active_class(current_url, route_name, **kwargs)
+ self.assertEqual(' active ', actual)
+
+ self.assertEqual(active_class('/foo', 'home'), '')
|
1421dd89b74bf753cf0b52a5e6fe200d221922b5
|
pirx/utils.py
|
pirx/utils.py
|
import os
def setting(name):
return name.upper()
def path(subpath):
project_root = os.path.dirname(os.path.realpath(__file__))
return os.path.join(project_root, subpath)
|
import os
def setting(name):
return name.upper()
def path(subpath):
import __main__
project_root = os.path.dirname(os.path.realpath(__main__.__file__))
return os.path.join(project_root, subpath)
|
Fix 'path' function: use main's file as project root
|
Fix 'path' function: use main's file as project root
|
Python
|
mit
|
piotrekw/pirx
|
---
+++
@@ -5,6 +5,7 @@
return name.upper()
def path(subpath):
- project_root = os.path.dirname(os.path.realpath(__file__))
+ import __main__
+ project_root = os.path.dirname(os.path.realpath(__main__.__file__))
return os.path.join(project_root, subpath)
|
04541918979c02b6dcd07d2c960cd24b9a745d10
|
mailgun/db.py
|
mailgun/db.py
|
from django.db import models
class MailgunLog(models.Model):
log_hash = models.CharField(max_length=64, unique=True)
data = models.TextField()
timestamp = models.DateTime()
|
from django.db import models
class MailgunLog(models.Model):
log_hash = models.CharField(max_length=64, unique=True)
data = models.TextField()
timestamp = models.DateTimeField()
|
Fix incorrect models field name for MailgunLog
|
Fix incorrect models field name for MailgunLog
|
Python
|
mit
|
p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc
|
---
+++
@@ -3,4 +3,4 @@
class MailgunLog(models.Model):
log_hash = models.CharField(max_length=64, unique=True)
data = models.TextField()
- timestamp = models.DateTime()
+ timestamp = models.DateTimeField()
|
662046497abfa6f7f6553aeb266a261637ba6407
|
numba/postpasses.py
|
numba/postpasses.py
|
# -*- coding: utf-8 -*-
"""
Postpasses over the LLVM IR.
The signature of each postpass is postpass(env, ee, lmod, lfunc) -> lfunc
"""
from __future__ import print_function, division, absolute_import
from numba.support.math_support import math_support, libs
default_postpasses = {}
def register_default(name):
def dec(f):
default_postpasses[name] = f
return f
return dec
# ______________________________________________________________________
# Postpasses
@register_default('math')
def postpass_link_math(env, ee, lmod, lfunc):
"numba.math.* -> mathcode.*"
replacements = {}
for lf in lmod.functions:
if lf.name.startswith('numba.math.'):
_, _, name = lf.name.rpartition('.')
replacements[lf.name] = name
del lf # this is dead after linking below
math_support.link_llvm_math_intrinsics(ee, lmod, libs.math_library,
math_support.LLVMLinker(),
replacements)
return lfunc
|
# -*- coding: utf-8 -*-
"""
Postpasses over the LLVM IR.
The signature of each postpass is postpass(env, ee, lmod, lfunc) -> lfunc
"""
from __future__ import print_function, division, absolute_import
from numba.support.math_support import linking, libs
default_postpasses = {}
def register_default(name):
def dec(f):
default_postpasses[name] = f
return f
return dec
# ______________________________________________________________________
# Postpasses
@register_default('math')
def postpass_link_math(env, ee, lmod, lfunc):
"numba.math.* -> mathcode.*"
replacements = {}
for lf in lmod.functions:
if lf.name.startswith('numba.math.'):
_, _, name = lf.name.rpartition('.')
replacements[lf.name] = name
del lf # this is dead after linking below
linking.link_llvm_math_intrinsics(ee, lmod, libs.math_library,
linking.LLVMLinker(),
replacements)
return lfunc
|
Clean up old test, pass all tests
|
Clean up old test, pass all tests
|
Python
|
bsd-2-clause
|
jriehl/numba,jriehl/numba,stefanseefeld/numba,seibert/numba,gdementen/numba,sklam/numba,pitrou/numba,stefanseefeld/numba,stuartarchibald/numba,stonebig/numba,numba/numba,GaZ3ll3/numba,ssarangi/numba,sklam/numba,cpcloud/numba,sklam/numba,ssarangi/numba,stonebig/numba,gdementen/numba,pombredanne/numba,GaZ3ll3/numba,seibert/numba,seibert/numba,cpcloud/numba,gmarkall/numba,pombredanne/numba,sklam/numba,stefanseefeld/numba,pombredanne/numba,stonebig/numba,gmarkall/numba,pitrou/numba,GaZ3ll3/numba,ssarangi/numba,gdementen/numba,GaZ3ll3/numba,gdementen/numba,cpcloud/numba,jriehl/numba,stefanseefeld/numba,stuartarchibald/numba,gmarkall/numba,pitrou/numba,stonebig/numba,numba/numba,stuartarchibald/numba,pombredanne/numba,pombredanne/numba,stuartarchibald/numba,cpcloud/numba,jriehl/numba,stuartarchibald/numba,GaZ3ll3/numba,pitrou/numba,numba/numba,IntelLabs/numba,gmarkall/numba,stonebig/numba,ssarangi/numba,seibert/numba,stefanseefeld/numba,IntelLabs/numba,sklam/numba,seibert/numba,IntelLabs/numba,jriehl/numba,numba/numba,gdementen/numba,pitrou/numba,gmarkall/numba,cpcloud/numba,IntelLabs/numba,IntelLabs/numba,numba/numba,ssarangi/numba
|
---
+++
@@ -7,7 +7,7 @@
from __future__ import print_function, division, absolute_import
-from numba.support.math_support import math_support, libs
+from numba.support.math_support import linking, libs
default_postpasses = {}
@@ -30,8 +30,8 @@
replacements[lf.name] = name
del lf # this is dead after linking below
- math_support.link_llvm_math_intrinsics(ee, lmod, libs.math_library,
- math_support.LLVMLinker(),
+ linking.link_llvm_math_intrinsics(ee, lmod, libs.math_library,
+ linking.LLVMLinker(),
replacements)
return lfunc
|
b44b0f68a2dd00df1ec074cf39a66ce81cd0dae2
|
nowplaying.py
|
nowplaying.py
|
#!/usr/bin/env python
from termcolor import colored
from appscript import *
from track import Track
def main():
print(get_song())
def get_song():
itunes_open = bool(app('System Events').processes[its.name == 'iTunes'].count())
if itunes_open: # check if application open
itunes = app('iTunes')
if itunes.player_state.get() == k.playing: # check if song playing
track = Track(itunes.current_track.get())
return track
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from termcolor import colored
from appscript import *
from track import Track
def main():
print(get_song())
def get_song():
itunes_open = bool(app('System Events').processes[its.name == 'iTunes'].count())
if itunes_open: # check if application open
itunes = app('iTunes')
if itunes.player_state.get() == k.playing: # check if song playing
track = Track(itunes.current_track.get())
return track
else:
return colored('No song currently playing.', 'red')
else:
return colored('iTunes not open.', 'red')
if __name__ == '__main__':
main()
|
Update error output for app not open/song not playing
|
Update error output for app not open/song not playing
|
Python
|
mit
|
kshvmdn/nowplaying
|
---
+++
@@ -16,6 +16,10 @@
if itunes.player_state.get() == k.playing: # check if song playing
track = Track(itunes.current_track.get())
return track
+ else:
+ return colored('No song currently playing.', 'red')
+ else:
+ return colored('iTunes not open.', 'red')
if __name__ == '__main__':
main()
|
5fef15285060b384ec2fd56b328e9848a63d1be0
|
dbaas/integrations/monitoring/manager.py
|
dbaas/integrations/monitoring/manager.py
|
from dbaas_dbmonitor.provider import DBMonitorProvider
from dbaas_zabbix.provider import ZabbixProvider
import logging
LOG = logging.getLogger(__name__)
class MonitoringManager():
@classmethod
def create_monitoring(cls, databaseinfra):
try:
LOG.info("Creating monitoring...")
#ZabbixProvider().create_monitoring(dbinfra=databaseinfra)
return DBMonitorProvider().create_dbmonitor_monitoring(databaseinfra)
except Exception, e:
LOG.warn("Exception: %s" % e)
return None
@classmethod
def remove_monitoring(cls, databaseinfra):
try:
LOG.info("Removing monitoring...")
#ZabbixProvider().destroy_monitoring(dbinfra=databaseinfra)
return DBMonitorProvider().remove_dbmonitor_monitoring(databaseinfra)
except Exception, e:
LOG.warn("Exception: %s" % e)
return None
|
from dbaas_dbmonitor.provider import DBMonitorProvider
from dbaas_zabbix.provider import ZabbixProvider
import logging
LOG = logging.getLogger(__name__)
class MonitoringManager():
@classmethod
def create_monitoring(cls, databaseinfra):
try:
LOG.info("Creating monitoring...")
ZabbixProvider().create_monitoring(dbinfra=databaseinfra)
return DBMonitorProvider().create_dbmonitor_monitoring(databaseinfra)
except Exception, e:
LOG.warn("Exception: %s" % e)
return None
@classmethod
def remove_monitoring(cls, databaseinfra):
try:
LOG.info("Removing monitoring...")
ZabbixProvider().destroy_monitoring(dbinfra=databaseinfra)
return DBMonitorProvider().remove_dbmonitor_monitoring(databaseinfra)
except Exception, e:
LOG.warn("Exception: %s" % e)
return None
|
Remove comment from zabbix integration
|
Remove comment from zabbix integration
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
---
+++
@@ -10,7 +10,7 @@
def create_monitoring(cls, databaseinfra):
try:
LOG.info("Creating monitoring...")
- #ZabbixProvider().create_monitoring(dbinfra=databaseinfra)
+ ZabbixProvider().create_monitoring(dbinfra=databaseinfra)
return DBMonitorProvider().create_dbmonitor_monitoring(databaseinfra)
except Exception, e:
LOG.warn("Exception: %s" % e)
@@ -20,7 +20,7 @@
def remove_monitoring(cls, databaseinfra):
try:
LOG.info("Removing monitoring...")
- #ZabbixProvider().destroy_monitoring(dbinfra=databaseinfra)
+ ZabbixProvider().destroy_monitoring(dbinfra=databaseinfra)
return DBMonitorProvider().remove_dbmonitor_monitoring(databaseinfra)
except Exception, e:
LOG.warn("Exception: %s" % e)
|
3c8067a1b8fb3463fa4c45a6f03c8dc0fbf918b3
|
organizer/models.py
|
organizer/models.py
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
def __str__(self):
return self.name
class Startup(models.Model):
name = models.CharField(
max_length=31, db_index=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
description = models.TextField()
founded_date = models.DateField(
'date founded')
contact = models.EmailField()
website = models.URLField(max_length=255)
tags = models.ManyToManyField(Tag)
def __str__(self):
return self.name
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
def __str__(self):
return "{}: {}".format(
self.startup, self.title)
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Startup(models.Model):
name = models.CharField(
max_length=31, db_index=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
description = models.TextField()
founded_date = models.DateField(
'date founded')
contact = models.EmailField()
website = models.URLField(max_length=255)
tags = models.ManyToManyField(Tag)
def __str__(self):
return self.name
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
def __str__(self):
return "{}: {}".format(
self.startup, self.title)
|
Declare Meta class in Tag model.
|
Ch03: Declare Meta class in Tag model. [skip ci]
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
---
+++
@@ -12,6 +12,9 @@
max_length=31,
unique=True,
help_text='A label for URL config.')
+
+ class Meta:
+ ordering = ['name']
def __str__(self):
return self.name
|
f02fe06dad6760bb7ba88fbbf1e32b90f2c1d22e
|
src/waldur_openstack/openstack_tenant/utils.py
|
src/waldur_openstack/openstack_tenant/utils.py
|
from django.contrib.contenttypes.models import ContentType
from waldur_core.cost_tracking import ConsumableItem
from waldur_core.cost_tracking.models import DefaultPriceListItem
from . import models, PriceItemTypes
def get_consumable_item(flavor_name):
return ConsumableItem(item_type=PriceItemTypes.FLAVOR, key=flavor_name, name='Flavor: %s' % flavor_name)
def sync_price_list_item(flavor):
resource_content_type = ContentType.objects.get_for_model(models.Flavor)
consumable_item = get_consumable_item(flavor.name)
DefaultPriceListItem._create_or_update_default_price_list_item(
resource_content_type=resource_content_type,
consumable_item=consumable_item,
)
|
from django.contrib.contenttypes.models import ContentType
from waldur_core.cost_tracking import ConsumableItem
from waldur_core.cost_tracking.models import DefaultPriceListItem
from . import models, PriceItemTypes
def get_consumable_item(flavor_name):
return ConsumableItem(item_type=PriceItemTypes.FLAVOR, key=flavor_name, name='Flavor: %s' % flavor_name)
def sync_price_list_item(flavor):
resource_content_type = ContentType.objects.get_for_model(models.Instance)
consumable_item = get_consumable_item(flavor.name)
DefaultPriceListItem._create_or_update_default_price_list_item(
resource_content_type=resource_content_type,
consumable_item=consumable_item,
)
|
Fix default price list item synchronization for OpenStack flavor [SENTRY-2030] Previously invalid resource content type has been used.
|
Fix default price list item synchronization for OpenStack flavor [SENTRY-2030]
Previously invalid resource content type has been used.
|
Python
|
mit
|
opennode/nodeconductor-openstack
|
---
+++
@@ -11,7 +11,7 @@
def sync_price_list_item(flavor):
- resource_content_type = ContentType.objects.get_for_model(models.Flavor)
+ resource_content_type = ContentType.objects.get_for_model(models.Instance)
consumable_item = get_consumable_item(flavor.name)
DefaultPriceListItem._create_or_update_default_price_list_item(
resource_content_type=resource_content_type,
|
776d5a686445712c6b3b1a4da72c45f2a1664e64
|
tests/matchers/test_contain.py
|
tests/matchers/test_contain.py
|
import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestAbove(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {0} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
|
import unittest
from robber import expect
from robber.matchers.contain import Contain
class TestContain(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
expect(Contain((1, 2, 3), 3).matches()) == True
expect(Contain({'key': 'value'}, 'other').matches()) == False
expect(Contain([1, 2, 3], 4).matches()) == False
expect(Contain((1, 2, 3), 4).matches()) == False
def test_failure_message(self):
contain = Contain([1, 2, 3], 4)
expect(contain.failure_message()) == 'Expected {0} to contain 4'.format([1, 2, 3])
def test_register(self):
expect(expect.matcher('contain')) == Contain
|
Rename the 'contain' TestCase to TestContain
|
Rename the 'contain' TestCase to TestContain
|
Python
|
mit
|
taoenator/robber.py,vesln/robber.py
|
---
+++
@@ -2,7 +2,7 @@
from robber import expect
from robber.matchers.contain import Contain
-class TestAbove(unittest.TestCase):
+class TestContain(unittest.TestCase):
def test_matches(self):
expect(Contain({'key': 'value'}, 'key').matches()) == True
expect(Contain([1, 2, 3], 2).matches()) == True
|
8791ddc33037f4ab3d93ae834fbbb9d551ab0837
|
genes/debian/traits.py
|
genes/debian/traits.py
|
from functools import wraps
import platform
def is_debian(versions=None, distro_name='Debian'):
operating_system = platform.system()
distribution, version, codename = platform.linux_distribution()
is_version = True
if versions:
is_version = version in versions or codename in versions
return operating_system == 'Linux' \
and distribution == distro_name \
and is_version
def only_debian(warn=True, error=False, versions=None):
def wrapper(func):
@wraps(func)
def run_if_debian(*args, **kwargs):
if is_debian(versions=versions):
return func(*args, **kwargs)
elif error:
# FIXME: logitize me
raise OSError('This command can only be run on Debian')
elif warn:
# FIXME: should log and warn if warn
pass
return run_if_debian
return wrapper
|
from functools import wraps
import platform
# FIXME: had to duplicate this for package level imports. this is a bad design
operating_system = platform.system()
distribution, version, codename = platform.linux_distribution()
def is_debian(versions=None, distro_name='Debian'):
# FIXME: this is duplicated above. Figure out why
operating_system = platform.system()
distribution, version, codename = platform.linux_distribution()
is_version = True
if versions:
is_version = version in versions or codename in versions
return operating_system == 'Linux' \
and distribution == distro_name \
and is_version
def only_debian(warn=True, error=False, versions=None):
def wrapper(func):
@wraps(func)
def run_if_debian(*args, **kwargs):
if is_debian(versions=versions):
return func(*args, **kwargs)
elif error:
# FIXME: logitize me
raise OSError('This command can only be run on Debian')
elif warn:
# FIXME: should log and warn if warn
pass
return run_if_debian
return wrapper
|
Add global level vars for other packages
|
Add global level vars for other packages
|
Python
|
mit
|
hatchery/Genepool2,hatchery/genepool
|
---
+++
@@ -2,7 +2,13 @@
import platform
+# FIXME: had to duplicate this for package level imports. this is a bad design
+operating_system = platform.system()
+distribution, version, codename = platform.linux_distribution()
+
+
def is_debian(versions=None, distro_name='Debian'):
+ # FIXME: this is duplicated above. Figure out why
operating_system = platform.system()
distribution, version, codename = platform.linux_distribution()
|
a8f9f1dffc9dd345504005427f9f02ae8e1e07a4
|
froide/foirequest/search_indexes.py
|
froide/foirequest/search_indexes.py
|
from haystack import indexes
from celery_haystack.indexes import CelerySearchIndex
from .models import FoiRequest
class FoiRequestIndex(CelerySearchIndex, indexes.Indexable):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
description = indexes.CharField(model_attr='description')
resolution = indexes.CharField(model_attr='resolution', default="")
status = indexes.CharField(model_attr='status')
readable_status = indexes.CharField(model_attr='readable_status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
url = indexes.CharField(model_attr='get_absolute_url')
public_body_name = indexes.CharField(model_attr='public_body__name', default="")
def get_model(self):
return FoiRequest
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().published.get_for_search_index()
def should_update(self, instance, **kwargs):
return self.instance.visibility > 1
|
from haystack import indexes
from celery_haystack.indexes import CelerySearchIndex
from .models import FoiRequest
class FoiRequestIndex(CelerySearchIndex, indexes.Indexable):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
description = indexes.CharField(model_attr='description')
resolution = indexes.CharField(model_attr='resolution', default="")
status = indexes.CharField(model_attr='status')
readable_status = indexes.CharField(model_attr='readable_status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
url = indexes.CharField(model_attr='get_absolute_url')
public_body_name = indexes.CharField(model_attr='public_body__name', default="")
def get_model(self):
return FoiRequest
def index_queryset(self):
"""Used when the entire index for model is updated."""
return self.get_model().published.get_for_search_index()
def should_update(self, instance, **kwargs):
return instance.visibility > 1
|
Fix mistake in search index update check
|
Fix mistake in search index update check
|
Python
|
mit
|
ryankanno/froide,CodeforHawaii/froide,fin/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide,ryankanno/froide,okfse/froide,CodeforHawaii/froide,okfse/froide,okfse/froide,catcosmo/froide,stefanw/froide,LilithWittmann/froide,ryankanno/froide,fin/froide,ryankanno/froide,catcosmo/froide,catcosmo/froide,LilithWittmann/froide,CodeforHawaii/froide,stefanw/froide,LilithWittmann/froide,CodeforHawaii/froide,stefanw/froide,okfse/froide,fin/froide,catcosmo/froide,stefanw/froide,catcosmo/froide,LilithWittmann/froide,okfse/froide,CodeforHawaii/froide,fin/froide
|
---
+++
@@ -25,4 +25,4 @@
return self.get_model().published.get_for_search_index()
def should_update(self, instance, **kwargs):
- return self.instance.visibility > 1
+ return instance.visibility > 1
|
4858a17940ec4b4425f743813c0c1ecef391d967
|
tests/test_file_handling.py
|
tests/test_file_handling.py
|
# -*- coding: utf-8 -*-
"""
format-sql
Copyright (c) 2014, Friedrich Paetzke (paetzke@fastmail.fm)
All rights reserved.
"""
import os
from format_sql.file_handling import format_file, load_from_file, main
def get_test_file(filename):
test_data = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
filename = os.path.join(test_data, 'tests/data', filename)
return filename
def test_format_empty_file():
filename = get_test_file('empty.py')
format_file(filename)
assert load_from_file(filename) == ''
|
# -*- coding: utf-8 -*-
"""
format-sql
Copyright (c) 2014, Friedrich Paetzke (paetzke@fastmail.fm)
All rights reserved.
"""
import os
import sys
from format_sql.file_handling import format_file, load_from_file, main
try:
from unittest.mock import patch
except ImportError:
from mock import patch
def get_test_file(filename):
test_data = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
filename = os.path.join(test_data, 'tests/data', filename)
return filename
def test_format_empty_file():
filename = get_test_file('empty.py')
format_file(filename)
assert load_from_file(filename) == ''
def test_main():
sys.argv = ['NULL', 'tests']
with patch('format_sql.file_handling.format_file') as mocked:
main()
assert mocked.call_count == 19
|
Add test for file iteration
|
Add test for file iteration
|
Python
|
bsd-2-clause
|
paetzke/format-sql
|
---
+++
@@ -7,8 +7,14 @@
"""
import os
+import sys
from format_sql.file_handling import format_file, load_from_file, main
+
+try:
+ from unittest.mock import patch
+except ImportError:
+ from mock import patch
def get_test_file(filename):
@@ -21,3 +27,10 @@
filename = get_test_file('empty.py')
format_file(filename)
assert load_from_file(filename) == ''
+
+
+def test_main():
+ sys.argv = ['NULL', 'tests']
+ with patch('format_sql.file_handling.format_file') as mocked:
+ main()
+ assert mocked.call_count == 19
|
16214545b301aaba4847ffae5efe67782abe993d
|
toolz/tests/test_curried.py
|
toolz/tests/test_curried.py
|
import toolz
import toolz.curried
from toolz.curried import take, first, second, sorted, merge_with, reduce
from operator import add
def test_take():
assert list(take(2)([1, 2, 3])) == [1, 2]
def test_first():
assert first is toolz.itertoolz.first
def test_merge_with():
assert merge_with(sum)({1: 1}, {1: 2}) == {1: 3}
def test_merge_with_list():
assert merge_with(sum, [{'a': 1}, {'a': 2}]) == {'a': 3}
def test_sorted():
assert sorted(key=second)([(1, 2), (2, 1)]) == [(2, 1), (1, 2)]
def test_reduce():
assert reduce(add)((1, 2, 3)) == 6
def test_module_name():
assert toolz.curried.__name__ == 'toolz.curried'
|
import toolz
import toolz.curried
from toolz.curried import (take, first, second, sorted, merge_with, reduce,
merge)
from collections import defaultdict
from operator import add
def test_take():
assert list(take(2)([1, 2, 3])) == [1, 2]
def test_first():
assert first is toolz.itertoolz.first
def test_merge():
assert merge(factory=lambda: defaultdict(int))({1: 1}) == {1: 1}
assert merge({1: 1}) == {1: 1}
assert merge({1: 1}, factory=lambda: defaultdict(int)) == {1: 1}
def test_merge_with():
assert merge_with(sum)({1: 1}, {1: 2}) == {1: 3}
def test_merge_with_list():
assert merge_with(sum, [{'a': 1}, {'a': 2}]) == {'a': 3}
def test_sorted():
assert sorted(key=second)([(1, 2), (2, 1)]) == [(2, 1), (1, 2)]
def test_reduce():
assert reduce(add)((1, 2, 3)) == 6
def test_module_name():
assert toolz.curried.__name__ == 'toolz.curried'
|
Add tests for curried merge
|
Add tests for curried merge
|
Python
|
bsd-3-clause
|
machinelearningdeveloper/toolz,llllllllll/toolz,jdmcbr/toolz,llllllllll/toolz,pombredanne/toolz,machinelearningdeveloper/toolz,simudream/toolz,jcrist/toolz,cpcloud/toolz,quantopian/toolz,jcrist/toolz,jdmcbr/toolz,pombredanne/toolz,cpcloud/toolz,simudream/toolz,quantopian/toolz
|
---
+++
@@ -1,6 +1,8 @@
import toolz
import toolz.curried
-from toolz.curried import take, first, second, sorted, merge_with, reduce
+from toolz.curried import (take, first, second, sorted, merge_with, reduce,
+ merge)
+from collections import defaultdict
from operator import add
@@ -10,6 +12,12 @@
def test_first():
assert first is toolz.itertoolz.first
+
+
+def test_merge():
+ assert merge(factory=lambda: defaultdict(int))({1: 1}) == {1: 1}
+ assert merge({1: 1}) == {1: 1}
+ assert merge({1: 1}, factory=lambda: defaultdict(int)) == {1: 1}
def test_merge_with():
|
1a8095e71e81eff716524fa75eb9f07615ee61d2
|
pre_commit/languages/python.py
|
pre_commit/languages/python.py
|
from plumbum import local
import subprocess
PY_ENV = 'py_env'
def install_environment():
assert local.path('setup.py').exists()
# Return immediately if we already have a virtualenv
if local.path(PY_ENV).exists():
return
# Install a virtualenv
local['virtualenv'][PY_ENV]()
local['bash']['-c', 'source {0}/bin/activate && pip install .'.format(PY_ENV)]()
def run_hook(hook, file_args):
# TODO: batch filenames
process = subprocess.Popen(
['bash', '-c', ' '.join(
['source {0}/bin/activate &&'.format(PY_ENV)] +
[hook['entry']] + hook.get('args', []) + list(file_args)
)],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
ret = process.communicate()
return (0,) + ret
return local['bash'][
'-c', ' '.join(
['source {0}/bin/activate &&'.format(PY_ENV)] +
[hook['entry']] + hook.get('args', []) + list(file_args)
)
].run()
|
from plumbum import local
import subprocess
PY_ENV = 'py_env'
def install_environment():
assert local.path('setup.py').exists()
# Return immediately if we already have a virtualenv
if local.path(PY_ENV).exists():
return
# Install a virtualenv
local['virtualenv'][PY_ENV]()
local['bash']['-c', 'source {0}/bin/activate && pip install .'.format(PY_ENV)]()
def run_hook(hook, file_args):
# TODO: batch filenames
process = subprocess.Popen(
['bash', '-c', ' '.join(
['source {0}/bin/activate &&'.format(PY_ENV)] +
[hook['entry']] + hook.get('args', []) + list(file_args)
)],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
)
ret = process.communicate()
return (process.returncode,) + ret
return local['bash'][
'-c', ' '.join(
['source {0}/bin/activate &&'.format(PY_ENV)] +
[hook['entry']] + hook.get('args', []) + list(file_args)
)
].run()
|
Return the actual return code
|
Return the actual return code
|
Python
|
mit
|
chriskuehl/pre-commit,philipgian/pre-commit,chriskuehl/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit,dnephin/pre-commit,beni55/pre-commit,beni55/pre-commit,philipgian/pre-commit,pre-commit/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,barrysteyn/pre-commit,Teino1978-Corp/pre-commit,dnephin/pre-commit,dnephin/pre-commit,philipgian/pre-commit,chriskuehl/pre-commit,philipgian/pre-commit,pre-commit/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,pre-commit/pre-commit,Lucas-C/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,beni55/pre-commit,dnephin/pre-commit,philipgian/pre-commit,pre-commit/pre-commit,Lucas-C/pre-commit,beni55/pre-commit,chriskuehl/pre-commit-1,pre-commit/pre-commit,Lucas-C/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,chriskuehl/pre-commit,Lucas-C/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,barrysteyn/pre-commit,Teino1978-Corp/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit
|
---
+++
@@ -27,7 +27,7 @@
)
ret = process.communicate()
- return (0,) + ret
+ return (process.returncode,) + ret
return local['bash'][
'-c', ' '.join(
|
5d1a3ffedcb451a6a52b5e3492a56ef7663438d3
|
packages/Python/lldbsuite/test/repl/po_repl_type/TestREPLPOReplType.py
|
packages/Python/lldbsuite/test/repl/po_repl_type/TestREPLPOReplType.py
|
# TestREPLPOReplType.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that types defined in the REPL can be po'ed."""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
class REPLPOTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
def doTest(self):
self.command('struct S {}')
self.command(':po S()', patterns=['S'])
self.command('extension S : CustomDebugStringConvertible { public var debugDescription: String { get { return "ABC" } } }')
self.command(':po S()', patterns='ABC')
|
# TestREPLPOReplType.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that types defined in the REPL can be po'ed."""
import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.decorators as decorators
class REPLPOTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
@decorators.swiftTest
@decorators.no_debug_info_test
@decorators.expectedFailureAll(oslist=["macosx", "linux"], bugnumber="rdar://26725839")
def testREPL(self):
REPLTest.testREPL(self)
def doTest(self):
self.command('struct S {}')
self.command(':po S()', patterns=['S'])
self.command('extension S : CustomDebugStringConvertible { public var debugDescription: String { get { return "ABC" } } }')
self.command(':po S()', patterns='ABC')
|
Mark this test as a known failure to return the bots to blue
|
Mark this test as a known failure to return the bots to blue
|
Python
|
apache-2.0
|
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
---
+++
@@ -15,10 +15,17 @@
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
+import lldbsuite.test.decorators as decorators
class REPLPOTestCase (REPLTest):
mydir = REPLTest.compute_mydir(__file__)
+
+ @decorators.swiftTest
+ @decorators.no_debug_info_test
+ @decorators.expectedFailureAll(oslist=["macosx", "linux"], bugnumber="rdar://26725839")
+ def testREPL(self):
+ REPLTest.testREPL(self)
def doTest(self):
self.command('struct S {}')
|
43d4b6a3ccf49b3a0307da98344b0fe8f61acaf1
|
brew/rest.py
|
brew/rest.py
|
import json
import time
import jsonschema
from pkg_resources import resource_string
from flask import request, jsonify
from brew import app, controller, machine, mongo
@app.route('/api/recipe', methods=['POST'])
def create_recipe():
schema = resource_string(__name__, 'data/recipe.schema.json').decode('utf-8')
recipe_json = request.get_json()
schema_dict = json.loads(schema)
jsonschema.validate(schema_dict, recipe_json)
mongo.db.recipes.insert(recipe_json)
return jsonify(success=True)
@app.route('/api/status', methods=['GET'])
def status():
return jsonify(timestamp=int(time.time() * 1000),
step=machine.current_step,
temperature=controller.get_temperature())
|
import json
import time
import jsonschema
from pkg_resources import resource_string
from flask import request, jsonify
from brew import app, controller, machine, mongo
@app.route('/api/recipe', methods=['POST'])
def create_recipe():
schema = resource_string(__name__, 'data/recipe.schema.json').decode('utf-8')
recipe_json = request.get_json()
schema_dict = json.loads(schema)
jsonschema.validate(schema_dict, recipe_json)
mongo.db.recipes.insert(recipe_json)
for malt in recipe_json['malts']:
d = {'name': malt['name']}
mongo.db.malts.update(d, d, True)
return jsonify(success=True)
@app.route('/api/status', methods=['GET'])
def status():
return jsonify(timestamp=int(time.time() * 1000),
step=machine.current_step,
temperature=controller.get_temperature())
|
Save malts for future reference
|
Save malts for future reference
|
Python
|
mit
|
brewpeople/brewmeister,brewpeople/brewmeister,brewpeople/brewmeister
|
---
+++
@@ -13,6 +13,11 @@
schema_dict = json.loads(schema)
jsonschema.validate(schema_dict, recipe_json)
mongo.db.recipes.insert(recipe_json)
+
+ for malt in recipe_json['malts']:
+ d = {'name': malt['name']}
+ mongo.db.malts.update(d, d, True)
+
return jsonify(success=True)
|
344457b498f12dfceb8e687b326ba68064d6bda6
|
run-tests.py
|
run-tests.py
|
import os
PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")
def runtestdir(subdir):
#cwd = os.getcwd()
#subdir = os.path.join(cwd, subdir)
entries = os.listdir(subdir)
total = 0
errs = 0
for f in entries:
if not f.endswith(".py"):
continue
if not f.startswith("test_"):
continue
cmd = "python %s/%s" % (subdir, f)
print "FILE: %s/%s" % (subdir, f)
exit_code = os.system(cmd)
total += 1
if exit_code != 0:
errs += 1
print "SUMMARY: %s -> %s total / %s error" % (subdir, total, errs)
if __name__ == "__main__":
#
os.chdir(TEST_DIR)
#
os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
#
runtestdir("bindertest")
|
import os, sys
PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")
def runtestdir(subdir):
entries = os.listdir(subdir)
total = 0
errs = 0
for f in entries:
if not f.endswith(".py"):
continue
if not f.startswith("test_"):
continue
test_file = os.path.join(subdir, f)
print "FILE:", test_file
exit_code = os.system(sys.executable + " " + test_file)
total += 1
if exit_code != 0:
errs += 1
print "SUMMARY: %s -> %s total / %s error (%s)" \
% (subdir, total, errs, sys.executable)
if __name__ == "__main__":
os.chdir(TEST_DIR)
os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
runtestdir("bindertest")
|
Test runner uses current python
|
Test runner uses current python
|
Python
|
mit
|
divtxt/binder
|
---
+++
@@ -1,13 +1,11 @@
-import os
+import os, sys
PROJECT_DIR = os.path.abspath(os.path.dirname( __file__ ))
SRC_DIR = os.path.join(PROJECT_DIR, "src")
TEST_DIR = os.path.join(PROJECT_DIR, "test")
def runtestdir(subdir):
- #cwd = os.getcwd()
- #subdir = os.path.join(cwd, subdir)
entries = os.listdir(subdir)
total = 0
errs = 0
@@ -16,20 +14,18 @@
continue
if not f.startswith("test_"):
continue
- cmd = "python %s/%s" % (subdir, f)
- print "FILE: %s/%s" % (subdir, f)
- exit_code = os.system(cmd)
+ test_file = os.path.join(subdir, f)
+ print "FILE:", test_file
+ exit_code = os.system(sys.executable + " " + test_file)
total += 1
if exit_code != 0:
errs += 1
- print "SUMMARY: %s -> %s total / %s error" % (subdir, total, errs)
+ print "SUMMARY: %s -> %s total / %s error (%s)" \
+ % (subdir, total, errs, sys.executable)
if __name__ == "__main__":
- #
os.chdir(TEST_DIR)
- #
os.environ["PYTHONPATH"] = ":".join([SRC_DIR, TEST_DIR])
- #
runtestdir("bindertest")
|
2e9d4d4b43a59b65dde1bb9196786f88eeb6edf0
|
lib/game_states/select_state_sfx.py
|
lib/game_states/select_state_sfx.py
|
from pygame.mixer import Sound
class SelectStateSFX(object):
    """Shared sound effects for the Character Select and Stage Select
    States.

    Class Constants:
        SCROLL_PATH: file path of the 'scroll through items' sound.
        CONFIRM_PATH: file path of the 'confirm choice' sound.

    Attributes:
        channel: the PyGame Channel every sound is played on.
        scroll: PyGame Sound played while the players scroll through
            the available options.
        confirm: PyGame Sound played when the players confirm a choice.
    """
    SCROLL_PATH = 'audio/scroll_char_stage.ogg'
    CONFIRM_PATH = 'confirm.wav'

    def __init__(self, channel):
        """Load both sound effects and remember the playback channel.

        Args:
            channel: the PyGame Channel the Sounds will be played on.
        """
        self.channel = channel
        self.scroll = Sound(self.SCROLL_PATH)
        self.confirm = Sound(self.CONFIRM_PATH)

    def play_scroll(self):
        """Play the sound for scrolling through the option list."""
        self.channel.play(self.scroll)

    def play_confirm(self):
        """Play the sound for confirming a selection."""
        self.channel.play(self.confirm)
|
from pygame.mixer import Sound
class SelectStateSFX(object):
    """Shared sound effects for the Character Select and Stage Select
    States.

    Class Constants:
        SCROLL_PATH: file path of the 'scroll through items' sound.
        CONFIRM_PATH: file path of the 'confirm choice' sound.

    Attributes:
        channel: the PyGame Channel every sound is played on.
        scroll: PyGame Sound played while the players scroll through
            the available options.
        confirm: PyGame Sound played when the players confirm a choice.
    """
    SCROLL_PATH = 'audio/scroll_char_stage.ogg'
    CONFIRM_PATH = 'confirm.wav'

    def __init__(self, channel):
        """Load both sound effects and remember the playback channel.

        Args:
            channel: the PyGame Channel the Sounds will be played on.
        """
        self.channel = channel
        self.scroll = Sound(self.SCROLL_PATH)
        self.confirm = Sound(self.CONFIRM_PATH)

    def play_scroll(self):
        """Play the sound for scrolling through the option list."""
        self.channel.play(self.scroll)

    def play_confirm(self):
        """Play the sound for confirming a selection."""
        self.channel.play(self.confirm)
|
Add space between import and class declaration
|
Add space between import and class declaration
All hail PEP.
|
Python
|
unlicense
|
MarquisLP/Sidewalk-Champion
|
---
+++
@@ -1,4 +1,5 @@
from pygame.mixer import Sound
+
class SelectStateSFX(object):
"""Plays sound effects that are used by both the Character Select
|
e5f00a6a5e71d8f5fe98547732f4c9e15a3efc1e
|
src/nodeconductor_paas_oracle/apps.py
|
src/nodeconductor_paas_oracle/apps.py
|
from django.apps import AppConfig
class OracleConfig(AppConfig):
    """Django application config for the Oracle PaaS plugin."""

    name = 'nodeconductor_paas_oracle'
    verbose_name = 'Oracle'
    service_name = 'Oracle'

    def ready(self):
        """Register the Oracle backend once the app registry is ready."""
        # Imports are deferred to ready() so they run after Django app setup.
        from nodeconductor.structure import SupportedServices
        from .backend import OracleBackend

        SupportedServices.register_backend(OracleBackend)
|
from django.apps import AppConfig
class OracleConfig(AppConfig):
    """Django application config for the Oracle PaaS plugin."""

    name = 'nodeconductor_paas_oracle'
    verbose_name = 'Oracle'
    service_name = 'Oracle'

    def ready(self):
        """Hook the Oracle backend and its cost tracking into NodeConductor."""
        # Imports are deferred to ready() so they run after Django app setup.
        from nodeconductor.structure import SupportedServices
        from nodeconductor.cost_tracking import CostTrackingRegister
        from .backend import OracleBackend

        SupportedServices.register_backend(OracleBackend)

        # Cost tracking is registered under this app's label.
        from .cost_tracking import OracleCostTrackingBackend
        CostTrackingRegister.register(self.label, OracleCostTrackingBackend)
|
Add registration to cost tracking
|
Add registration to cost tracking
|
Python
|
mit
|
opennode/nodeconductor-paas-oracle
|
---
+++
@@ -8,5 +8,11 @@
def ready(self):
from nodeconductor.structure import SupportedServices
+ from nodeconductor.cost_tracking import CostTrackingRegister
+
from .backend import OracleBackend
SupportedServices.register_backend(OracleBackend)
+
+ # cost tracking
+ from .cost_tracking import OracleCostTrackingBackend
+ CostTrackingRegister.register(self.label, OracleCostTrackingBackend)
|
bf88702b9a4d76fd3fc0cd3dbcf43d71d7259957
|
sweettooth/auth/context_processors.py
|
sweettooth/auth/context_processors.py
|
from auth import forms
def login_form(request):
    """Context processor exposing an inline login form to anonymous users.

    Returns an empty context for authenticated users; otherwise a context
    containing a form *instance* under 'login_popup_form'.
    """
    if request.user.is_authenticated():
        return dict()
    # Bug fix: instantiate the form -- the original passed the class object
    # itself into the template context instead of a form instance.
    return dict(login_popup_form=forms.InlineAuthenticationForm())
|
from auth import forms
def login_form(request):
    """Context processor exposing an inline login form to anonymous users."""
    if not request.user.is_authenticated():
        # Anonymous visitor: hand the template a fresh form instance.
        return {'login_popup_form': forms.InlineAuthenticationForm()}
    return {}
|
Fix a silly typo in the login popup form
|
Fix a silly typo in the login popup form
I have no idea why Django was OK with this...
|
Python
|
agpl-3.0
|
magcius/sweettooth,GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,magcius/sweettooth,GNOME/extensions-web
|
---
+++
@@ -5,4 +5,4 @@
if request.user.is_authenticated():
return dict()
- return dict(login_popup_form=forms.InlineAuthenticationForm)
+ return dict(login_popup_form=forms.InlineAuthenticationForm())
|
d3438e85ab4158d769b0662729a8aff7d143971a
|
csv_ical/tests/test_convert.py
|
csv_ical/tests/test_convert.py
|
import unittest
from csv_ical import convert
class TestConvert(unittest.TestCase):
    """Smoke tests for the csv_ical Convert helper."""

    def setUp(self):
        """Build a fresh converter before every test."""
        self.convert = convert.Convert()

    def test_generate_configs(self):
        """Generating the default config dictionary should not raise."""
        self.convert._generate_configs_from_default()
|
import datetime
import os
import tempfile
import unittest
from syspath import get_git_root
from csv_ical import convert
# Example fixtures shipped in the repository's examples/ directory.
EXAMPLE_ICS = os.path.join(get_git_root(), 'examples', 'arrive.ics')
EXAMPLE_CSV = os.path.join(get_git_root(), 'examples', 'BostonCruiseTerminalSchedule.csv')
# Presumably column indices into the example CSV (test_make_ical writes
# datetimes into columns 7 and 8 before converting) -- confirm against
# Convert's config handling.
CSV_CONFIGS = {
    'HEADER_COLUMNS_TO_SKIP': 2,
    'CSV_NAME': 3,
    'CSV_START_DATE': 7,
    'CSV_END_DATE': 8,
    'CSV_DESCRIPTION': 6,
    'CSV_LOCATION': 9,
}
class TestConvert(unittest.TestCase):
    """Round-trip tests for the csv_ical Convert helper."""

    def setUp(self):
        """Build a fresh converter before every test."""
        self.convert = convert.Convert()

    def test_generate_configs(self):
        """Defaults map CSV_NAME to column 0."""
        config = self.convert._generate_configs_from_default()
        self.assertEqual(config['CSV_NAME'], 0)

    def test_generate_configs_override(self):
        """Caller-supplied overrides replace the defaults."""
        overrides = {
            'CSV_NAME': 5,
        }
        config = self.convert._generate_configs_from_default(overrides)
        self.assertEqual(config['CSV_NAME'], 5)

    def test_read_ical(self):
        self.convert.read_ical(EXAMPLE_ICS)
        self.assertNotEqual(self.convert.cal, None)

    def test_read_csv(self):
        self.convert.read_csv(EXAMPLE_CSV)
        self.assertNotEqual(self.convert.csv_data, [])

    def test_make_ical(self):
        self.convert.read_csv(EXAMPLE_CSV)
        self.convert.csv_data = [self.convert.csv_data[0]]
        # Columns 7/8 are the start/end dates; real datetimes are required.
        self.convert.csv_data[0][7] = datetime.datetime.now()
        self.convert.csv_data[0][8] = datetime.datetime.now()
        self.convert.make_ical(CSV_CONFIGS)
        self.assertNotEqual(self.convert.cal, None)

    def test_make_csv(self):
        # Bug fix: this method was defined twice with an identical body; the
        # second definition silently shadowed the first. One copy suffices.
        self.convert.read_ical(EXAMPLE_ICS)
        self.convert.make_csv()
        self.assertNotEqual(self.convert.csv_data, [])

    def test_save_ical(self):
        self.convert.read_ical(EXAMPLE_ICS)
        with tempfile.NamedTemporaryFile() as temp:
            self.convert.save_ical(temp.name)

    def test_save_csv(self):
        self.convert.read_csv(EXAMPLE_CSV)
        with tempfile.NamedTemporaryFile() as temp:
            self.convert.save_csv(temp.name)
|
Add tests for all methods
|
Add tests for all methods
|
Python
|
mit
|
albertyw/csv-to-ical
|
---
+++
@@ -1,6 +1,22 @@
+import datetime
+import os
+import tempfile
import unittest
+from syspath import get_git_root
+
from csv_ical import convert
+
+EXAMPLE_ICS = os.path.join(get_git_root(), 'examples', 'arrive.ics')
+EXAMPLE_CSV = os.path.join(get_git_root(), 'examples', 'BostonCruiseTerminalSchedule.csv')
+CSV_CONFIGS = {
+ 'HEADER_COLUMNS_TO_SKIP': 2,
+ 'CSV_NAME': 3,
+ 'CSV_START_DATE': 7,
+ 'CSV_END_DATE': 8,
+ 'CSV_DESCRIPTION': 6,
+ 'CSV_LOCATION': 9,
+}
class TestConvert(unittest.TestCase):
@@ -8,4 +24,48 @@
self.convert = convert.Convert()
def test_generate_configs(self):
- self.convert._generate_configs_from_default()
+ config = self.convert._generate_configs_from_default()
+ self.assertEqual(config['CSV_NAME'], 0)
+
+ def test_generate_configs_override(self):
+ overrides = {
+ 'CSV_NAME': 5,
+ }
+ config = self.convert._generate_configs_from_default(overrides)
+ self.assertEqual(config['CSV_NAME'], 5)
+
+ def test_read_ical(self):
+ self.convert.read_ical(EXAMPLE_ICS)
+ self.assertNotEqual(self.convert.cal, None)
+
+ def test_read_csv(self):
+ self.convert.read_csv(EXAMPLE_CSV)
+ self.assertNotEqual(self.convert.csv_data, [])
+
+ def test_make_ical(self):
+ self.convert.read_csv(EXAMPLE_CSV)
+ self.convert.csv_data = [self.convert.csv_data[0]]
+ self.convert.csv_data[0][7] = datetime.datetime.now()
+ self.convert.csv_data[0][8] = datetime.datetime.now()
+ self.convert.make_ical(CSV_CONFIGS)
+ self.assertNotEqual(self.convert.cal, None)
+
+ def test_make_csv(self):
+ self.convert.read_ical(EXAMPLE_ICS)
+ self.convert.make_csv()
+ self.assertNotEqual(self.convert.csv_data, [])
+
+ def test_make_csv(self):
+ self.convert.read_ical(EXAMPLE_ICS)
+ self.convert.make_csv()
+ self.assertNotEqual(self.convert.csv_data, [])
+
+ def test_save_ical(self):
+ self.convert.read_ical(EXAMPLE_ICS)
+ with tempfile.NamedTemporaryFile() as temp:
+ self.convert.save_ical(temp.name)
+
+ def test_save_csv(self):
+ self.convert.read_csv(EXAMPLE_CSV)
+ with tempfile.NamedTemporaryFile() as temp:
+ self.convert.save_csv(temp.name)
|
f5fd74dac54f657cc64fdaa0b838b00b72ce5ee6
|
dev/make-release-notes.py
|
dev/make-release-notes.py
|
#! /usr/bin/env python3
import re
import sys

# Usage: make-release-notes.py VERSION CHANGELOG_FILE OUTPUT_LIST_FILE
_, VERSION, CHANGELOG, LIST = sys.argv

# The changelog's first line must look like "# 1.2.3 (2020-01-31)".
HEADER_REGEX = fr"# {VERSION} \(\d\d\d\d-\d\d-\d\d\)\n"

# Release-note lines collected below and written to LIST at the end.
notes_list = []
def add_to_release_notes(line):
    """Append *line* to the collected notes as a markdown bullet."""
    # Changelog bullets must be full sentences ending with a period.
    assert line.endswith("."), line
    notes_list.append("* {}\n".format(line))
with open(CHANGELOG) as f:
    # The first line must be the header for the version being released.
    first_line = next(f)
    if not re.match(HEADER_REGEX, first_line):
        sys.exit(
            f'First changelog line "{first_line.rstrip()}" must '
            f'start with "{HEADER_REGEX.rstrip()}"'
        )
    # Keep the header text (minus the leading "# ") plus a blank line.
    notes_list.extend([first_line[2:], "\n"])
    for line in f:
        # Blank lines between bullets are skipped; bullets are collected.
        if not line.strip():
            continue
        if line.startswith("* "):
            add_to_release_notes(line[2:].strip())
        else:
            # First non-bullet line marks the start of an older release.
            break
def check(name, text):
    """Show *text* framed by dividers and exit unless the user accepts it."""
    divider = "*" * 60
    print(divider)
    print(text)
    print(divider)
    answer = input("Accept this %s (Y/n)? " % name).strip().lower()
    # An empty answer counts as acceptance; anything but "y" aborts.
    if answer and answer != "y":
        sys.exit(1)
# Show the assembled notes and abort unless the user accepts them.
check("changelog", "".join(notes_list))

with open(LIST, "w") as f:
    f.writelines(notes_list)
|
#! /usr/bin/env python3
import re
import sys

# Usage: make-release-notes.py VERSION CHANGELOG_FILE OUTPUT_LIST_FILE
_, VERSION, CHANGELOG, LIST = sys.argv

# The changelog's first line must look like "# 1.2.3 (2020-01-31)".
HEADER_REGEX = fr"# {VERSION} \(\d\d\d\d-\d\d-\d\d\)\n"

# Release-note lines collected below and written to LIST at the end.
notes_list = []
def add_to_release_notes(line):
    # NOTE(review): after the "allow line breaks" refactor this helper only
    # validates its argument and is never called anywhere in this script --
    # it looks like dead code; confirm before removing.
    assert line.endswith("."), line
with open(CHANGELOG) as f:
    # The first line must be the header for the version being released.
    first_line = next(f)
    if not re.match(HEADER_REGEX, first_line):
        sys.exit(
            f'First changelog line "{first_line.rstrip()}" must '
            f'start with "{HEADER_REGEX.rstrip()}"'
        )
    # Keep the header text (minus the leading "# ") plus a blank line.
    notes_list.extend([first_line[2:], "\n"])
    # NOTE(review): next(f) raises StopIteration if the file ends right after
    # the header -- assumes a well-formed changelog; confirm.
    next(f)  # Skip empty line.
    for line in f:
        if not line.strip():
            # A blank line marks the end of this release's section.
            break
        else:
            # Lines are copied verbatim, so multi-line entries survive.
            notes_list.append(line)
def check(name, text):
    """Show *text* framed by dividers and exit unless the user accepts it."""
    divider = "*" * 60
    print(divider)
    print(text)
    print(divider)
    answer = input("Accept this %s (Y/n)? " % name).strip().lower()
    # An empty answer counts as acceptance; anything but "y" aborts.
    if answer and answer != "y":
        sys.exit(1)
# Show the assembled notes and abort unless the user accepts them.
check("changelog", "".join(notes_list))

with open(LIST, "w") as f:
    f.writelines(notes_list)
|
Allow line breaks in changelog.
|
Allow line breaks in changelog.
|
Python
|
mit
|
jendrikseipp/vulture,jendrikseipp/vulture
|
---
+++
@@ -11,7 +11,7 @@
def add_to_release_notes(line):
assert line.endswith("."), line
- notes_list.append(f"* {line}\n")
+
with open(CHANGELOG) as f:
@@ -22,13 +22,12 @@
f'start with "{HEADER_REGEX.rstrip()}"'
)
notes_list.extend([first_line[2:], "\n"])
+ next(f) # Skip empty line.
for line in f:
if not line.strip():
- continue
- if line.startswith("* "):
- add_to_release_notes(line[2:].strip())
+ break
else:
- break
+ notes_list.append(line)
def check(name, text):
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.